From b10dbad707ae5d028aca3513b2d9d2d6193e2377 Mon Sep 17 00:00:00 2001
From: RatkoVasic-NOAA <37597874+RatkoVasic-NOAA@users.noreply.github.com>
Date: Mon, 26 Feb 2024 13:47:54 -0500
Subject: [PATCH 01/42] [develop] Changes for Rocky8 on Hera (#1041)

Hera is switching from CentOS to Rocky OS.
* One of the changes needed for SRW to run on both Rocky and CentOS is to update the cmake version from 3.20.1 to 3.23.1
* Some HPSS batch jobs need more than 2GB of memory
* Fix an array-indexing bug in the bash script scripts/exregional_make_lbcs.sh
---
 modulefiles/build_hera_gnu.lua   | 3 +++
 modulefiles/build_hera_intel.lua | 5 ++++-
 parm/wflow/coldstart.yaml        | 4 ++--
 scripts/exregional_make_lbcs.sh  | 2 +-
 ush/machine/jet.yaml             | 2 +-
 5 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/modulefiles/build_hera_gnu.lua b/modulefiles/build_hera_gnu.lua
index c1f57e2115..90bd671b5a 100644
--- a/modulefiles/build_hera_gnu.lua
+++ b/modulefiles/build_hera_gnu.lua
@@ -5,6 +5,9 @@ the NOAA RDHPC machine Hera using GNU 9.2.0
 whatis([===[Loads libraries needed for building the UFS SRW App on Hera using GNU 9.2.0 ]===])
 
+-- When Hera switches from CentOS to Rocky, replace this line with the correct path to spack-stack
+-- If you want to use Rocky OS now, use the line below
+--prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env-rocky8/install/modulefiles/Core")
 prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env-noavx512/install/modulefiles/Core")
 prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulefiles")

diff --git a/modulefiles/build_hera_intel.lua b/modulefiles/build_hera_intel.lua
index 500d410dc5..314fd89183 100644
--- a/modulefiles/build_hera_intel.lua
+++ b/modulefiles/build_hera_intel.lua
@@ -8,6 +8,9 @@ whatis([===[Loads libraries needed for building the UFS SRW App on Hera ]===])
 prepend_path("MODULEPATH","/contrib/sutils/modulefiles")
 load("sutils")
 
+-- When Hera switches from CentOS to Rocky, replace this line with the correct path to spack-stack
+-- If you want to use Rocky OS now, use the line below
+--prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env-rocky8/install/modulefiles/Core")
 prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env-noavx512/install/modulefiles/Core")
 prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulefiles")
@@ -20,7 +23,7 @@ load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver))
 stack_python_ver=os.getenv("stack_python_ver") or "3.10.8"
 load(pathJoin("stack-python", stack_python_ver))
 
-cmake_ver=os.getenv("cmake_ver") or "3.20.1"
+cmake_ver=os.getenv("cmake_ver") or "3.23.1"
 load(pathJoin("cmake", cmake_ver))
 
 load("srw_common")

diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml
index e707b51d92..002d7f7b96 100644
--- a/parm/wflow/coldstart.yaml
+++ b/parm/wflow/coldstart.yaml
@@ -28,7 +28,7 @@ task_get_extrn_ics:
     <<: *default_vars
     ICS_OR_LBCS: ICS
   join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
-  memory: 2G
+  memory: 4G
   nnodes: 1
   native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}'
   partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}'
@@ -59,7 +59,7 @@ task_get_extrn_lbcs:
     <<: *default_vars
     ICS_OR_LBCS: LBCS
   join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
-  memory: 2G
+  memory: 4G
   nnodes: 1
   native: '{% 
if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}' partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index 72f9369ff6..695af1b409 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -410,7 +410,7 @@ for (( ii=0; ii<${num_fhrs}; ii=ii+bcgrpnum10 )); do fi ;; "GDAS") - fn_atm="${EXTRN_MDL_FNS[0][$i]}" + fn_atm="${EXTRN_MDL_FNS[$i]}" ;; "GEFS") fn_grib2="${EXTRN_MDL_FNS[$i]}" diff --git a/ush/machine/jet.yaml b/ush/machine/jet.yaml index 93d375ee02..bef698f874 100644 --- a/ush/machine/jet.yaml +++ b/ush/machine/jet.yaml @@ -9,7 +9,7 @@ platform: DOMAIN_PREGEN_BASEDIR: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/FV3LAM_pregen PARTITION_DEFAULT: sjet,vjet,kjet,xjet QUEUE_DEFAULT: batch - PARTITION_FCST: xjet + PARTITION_FCST: vjet QUEUE_FCST: batch PARTITION_HPSS: service QUEUE_HPSS: batch From a302ed5613d8dd2a04e61dfe358d87a4f1c31ba4 Mon Sep 17 00:00:00 2001 From: EdwardSnyder-NOAA <96196752+EdwardSnyder-NOAA@users.noreply.github.com> Date: Wed, 28 Feb 2024 09:33:23 -0600 Subject: [PATCH 02/42] [develop] Add three UFS Case Studies to WE2E testing process (#1043) Adding three additional UFS Case Studies (2019 Hurricane Barry, 2019 Halloween Storm, and 2020 July CAPE) to the workflow end-to-end testing process. * The UFS Case studies test yamls were created and added to the gaea-c5, derecho, and hera.gnu.com coverage suite files as well as the comprehensive suite files. * Removed Cheyenne related logic from current WE2E UFS Case Study (2020_CAD) and extended fcst wall time for it. --------- Co-authored-by: Parallel Works app-run user --- tests/WE2E/machine_suites/comprehensive | 3 ++ .../WE2E/machine_suites/comprehensive.derecho | 3 ++ .../machine_suites/comprehensive.noaacloud | 3 ++ tests/WE2E/machine_suites/comprehensive.orion | 3 ++ tests/WE2E/machine_suites/coverage.derecho | 1 + tests/WE2E/machine_suites/coverage.gaea-c5 | 1 + .../WE2E/machine_suites/coverage.hera.gnu.com | 1 + .../config.2019_halloween_storm.yaml | 38 +++++++++++++++++++ .../config.2019_hurricane_barry.yaml | 38 +++++++++++++++++++ .../ufs_case_studies/config.2020_CAD.yaml | 21 ++++------ .../ufs_case_studies/config.2020_CAPE.yaml | 36 ++++++++++++++++++ 11 files changed, 135 insertions(+), 13 deletions(-) create mode 100644 tests/WE2E/test_configs/ufs_case_studies/config.2019_halloween_storm.yaml create mode 100644 tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_barry.yaml create mode 100644 tests/WE2E/test_configs/ufs_case_studies/config.2020_CAPE.yaml diff --git a/tests/WE2E/machine_suites/comprehensive b/tests/WE2E/machine_suites/comprehensive index 7fdb30046a..a416408056 100644 --- a/tests/WE2E/machine_suites/comprehensive +++ b/tests/WE2E/machine_suites/comprehensive @@ -1,4 +1,7 @@ 2020_CAD +2020_CAPE +2019_hurricane_barry +2019_halloween_storm community custom_ESGgrid custom_ESGgrid_Central_Asia_3km diff --git a/tests/WE2E/machine_suites/comprehensive.derecho b/tests/WE2E/machine_suites/comprehensive.derecho index 5bf5b4db24..1fa9d1c055 100644 --- a/tests/WE2E/machine_suites/comprehensive.derecho +++ b/tests/WE2E/machine_suites/comprehensive.derecho @@ -1,4 +1,7 @@ 2020_CAD +2020_CAPE +2019_hurricane_barry +2019_halloween_storm community custom_ESGgrid #custom_ESGgrid_Central_Asia_3km diff --git a/tests/WE2E/machine_suites/comprehensive.noaacloud 
b/tests/WE2E/machine_suites/comprehensive.noaacloud index d44160244a..f81d8c9d1a 100644 --- a/tests/WE2E/machine_suites/comprehensive.noaacloud +++ b/tests/WE2E/machine_suites/comprehensive.noaacloud @@ -59,6 +59,9 @@ pregen_grid_orog_sfc_climo specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS specify_template_filenames 2020_CAD +2020_CAPE +2019_hurricane_barry +2019_halloween_storm get_from_AWS_ics_GEFS_lbcs_GEFS_fmt_grib2_2022040400_ensemble_2mems get_from_NOMADS_ics_FV3GFS_lbcs_FV3GFS long_fcst diff --git a/tests/WE2E/machine_suites/comprehensive.orion b/tests/WE2E/machine_suites/comprehensive.orion index 671756e294..b5b65c668b 100644 --- a/tests/WE2E/machine_suites/comprehensive.orion +++ b/tests/WE2E/machine_suites/comprehensive.orion @@ -1,4 +1,7 @@ 2020_CAD +2020_CAPE +2019_hurricane_barry +2019_halloween_storm community custom_ESGgrid custom_ESGgrid_Central_Asia_3km diff --git a/tests/WE2E/machine_suites/coverage.derecho b/tests/WE2E/machine_suites/coverage.derecho index 19bbc623c7..3475caebcc 100644 --- a/tests/WE2E/machine_suites/coverage.derecho +++ b/tests/WE2E/machine_suites/coverage.derecho @@ -7,3 +7,4 @@ grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 pregen_grid_orog_sfc_climo specify_template_filenames +2019_hurricane_barry diff --git a/tests/WE2E/machine_suites/coverage.gaea-c5 b/tests/WE2E/machine_suites/coverage.gaea-c5 index 4ff7f61f3c..068077464d 100644 --- a/tests/WE2E/machine_suites/coverage.gaea-c5 +++ b/tests/WE2E/machine_suites/coverage.gaea-c5 @@ -9,3 +9,4 @@ grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot nco_ensemble nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km +2020_CAPE diff --git a/tests/WE2E/machine_suites/coverage.hera.gnu.com b/tests/WE2E/machine_suites/coverage.hera.gnu.com index be66d82fb8..75533b4609 100644 --- a/tests/WE2E/machine_suites/coverage.hera.gnu.com +++ b/tests/WE2E/machine_suites/coverage.hera.gnu.com @@ -8,3 +8,4 @@ long_fcst MET_verification_only_vx MET_ensemble_verification_only_vx_time_lag nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 +2019_halloween_storm diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2019_halloween_storm.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2019_halloween_storm.yaml new file mode 100644 index 0000000000..9784d7bb44 --- /dev/null +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2019_halloween_storm.yaml @@ -0,0 +1,38 @@ +metadata: + description: |- + This test is to ensure that the workflow running in community mode + completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16 + physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms + for 2019 Halloween Storm. + # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to + # download these files, which can delay the WE2E testing process. + # To capture the event, extend the FCST_LEN_HRS from 6 to 90. 
+user: + RUN_ENVIR: community +platform: + EXTRN_MDL_DATA_STORES: aws +workflow: + CCPP_PHYS_SUITE: FV3_GFS_v16 + PREDEF_GRID_NAME: RRFS_CONUS_13km + DATE_FIRST_CYCL: '2019102812' + DATE_LAST_CYCL: '2019102812' + FCST_LEN_HRS: 6 + PREEXISTING_DIR_METHOD: rename +task_get_extrn_ics: + EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY + FV3GFS_FILE_FMT_ICS: nemsio +task_get_extrn_lbcs: + EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY + LBC_SPEC_INTVL_HRS: 3 + FV3GFS_FILE_FMT_LBCS: nemsio +rocoto: + tasks: + task_get_extrn_ics: + walltime: 06:00:00 + task_get_extrn_lbcs: + walltime: 06:00:00 + metatask_run_ensemble: + task_make_lbcs_mem#mem#: + walltime: 06:00:00 + task_run_fcst_mem#mem#: + walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_barry.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_barry.yaml new file mode 100644 index 0000000000..7e766b6ff9 --- /dev/null +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_barry.yaml @@ -0,0 +1,38 @@ +metadata: + description: |- + This test is to ensure that the workflow running in community mode + completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16 + physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms + for 2019 Hurricane Barry. + # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to + # download these files, which can delay the WE2E testing process. + # To capture the event, extend the FCST_LEN_HRS from 6 to 90. +user: + RUN_ENVIR: community +platform: + EXTRN_MDL_DATA_STORES: aws +workflow: + CCPP_PHYS_SUITE: FV3_GFS_v16 + PREDEF_GRID_NAME: RRFS_CONUS_13km + DATE_FIRST_CYCL: '2019071200' + DATE_LAST_CYCL: '2019071200' + FCST_LEN_HRS: 6 + PREEXISTING_DIR_METHOD: rename +task_get_extrn_ics: + EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY + FV3GFS_FILE_FMT_ICS: nemsio +task_get_extrn_lbcs: + EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY + LBC_SPEC_INTVL_HRS: 3 + FV3GFS_FILE_FMT_LBCS: nemsio +rocoto: + tasks: + task_get_extrn_ics: + walltime: 06:00:00 + task_get_extrn_lbcs: + walltime: 06:00:00 + metatask_run_ensemble: + task_make_lbcs_mem#mem#: + walltime: 06:00:00 + task_run_fcst_mem#mem#: + walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAD.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAD.yaml index 71e664e17a..fd5740be5d 100644 --- a/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAD.yaml +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAD.yaml @@ -1,19 +1,12 @@ metadata: description: |- This test is to ensure that the workflow running in community mode - completes successfully on the RRFS_CONUS_13km grid using the GFS_v16 - physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms for 2020 Cold Air Damming case. - #Users can modify date for other test cases listed in the UFS-CASE-STUDIES platforms - #Note for runnning this test on Cheyenne: please modify this config as follows - #task_get_extrn_ics: - # EXTRN_MDL_NAME_ICS: FV3GFS - # FV3GFS_FILE_FMT_ICS: nemsio - # USE_USER_STAGED_EXTRN_FILES: true - #task_get_extrn_lbcs: - # EXTRN_MDL_NAME_LBCS: FV3GFS - # LBC_SPEC_INTVL_HRS: 3 - # FV3GFS_FILE_FMT_LBCS: nemsio - # USE_USER_STAGED_EXTRN_FILES: true + completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16 + physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms + for 2020 Cold Air Damming case. 
+    # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to
+    # download these files, which can delay the WE2E testing process.
+    # To capture the event, extend the FCST_LEN_HRS from 6 to 90.
 user:
   RUN_ENVIR: community
 platform:
   EXTRN_MDL_DATA_STORES: aws
@@ -41,3 +34,5 @@ rocoto:
   tasks:
     metatask_run_ensemble:
       task_make_lbcs_mem#mem#:
         walltime: 06:00:00
+      task_run_fcst_mem#mem#:
+        walltime: 06:00:00
diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAPE.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAPE.yaml
new file mode 100644
index 0000000000..cb765604d6
--- /dev/null
+++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAPE.yaml
@@ -0,0 +1,36 @@
+metadata:
+  description: |-
+    This test is to ensure that the workflow running in community mode
+    completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16
+    physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms
+    for 2020 July Convective Available Potential Energy.
+    # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to
+    # download these files, which can delay the WE2E testing process.
+    # To capture the event, extend the FCST_LEN_HRS from 6 to 24.
+user:
+  RUN_ENVIR: community
+platform:
+  EXTRN_MDL_DATA_STORES: aws
+workflow:
+  CCPP_PHYS_SUITE: FV3_GFS_v16
+  PREDEF_GRID_NAME: RRFS_CONUS_13km
+  DATE_FIRST_CYCL: '2020072300'
+  DATE_LAST_CYCL: '2020072300'
+  FCST_LEN_HRS: 6
+  PREEXISTING_DIR_METHOD: rename
+task_get_extrn_ics:
+  EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY
+  FV3GFS_FILE_FMT_ICS: nemsio
+task_get_extrn_lbcs:
+  EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY
+  LBC_SPEC_INTVL_HRS: 3
+  FV3GFS_FILE_FMT_LBCS: nemsio
+rocoto:
+  tasks:
+    task_get_extrn_ics:
+      walltime: 06:00:00
+    task_get_extrn_lbcs:
+      walltime: 06:00:00
+    metatask_run_ensemble:
+      task_make_lbcs_mem#mem#:
+        walltime: 06:00:00

From 8fc9a8d76e5848d46a2bf25ea6d6fd91cd1846d4 Mon Sep 17 00:00:00 2001
From: Natalie Perlin <68030316+natalie-perlin@users.noreply.github.com>
Date: Thu, 29 Feb 2024 13:02:30 -0500
Subject: [PATCH 03/42] [develop] Update for Gaea-c5 (#1047)

* Enable SRW to run on Gaea-c5, using spack-stack v1.5.0 and the SRW-built conda environment
* Update code to rename the "gaea-c5" platform to "gaea". The name for Jenkins still needs to remain "gaeac5" at the moment.
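As an illustration of the user-facing effect of the rename (the devbuild.sh flags shown already exist in the repository; this exact command is an example, not part of this diff), a build on this platform is now invoked with the new machine name:

    ./devbuild.sh --platform=gaea --compiler=intel   # formerly --platform=gaea-c5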
A solution to solve library conflict for libstdc++.so.6 was to preload a specific library during a runtime, as specified in ./modulefiles/wflow_gaea.lua , ./modulefiles/tasks/gaea/python_srw.lua: setenv("LD_PRELOAD", "/opt/cray/pe/gcc/12.2.0/snos/lib64/libstdc++.so.6") --------- Co-authored-by: Natalie Perlin Co-authored-by: michael.lueken --- .cicd/Jenkinsfile | 13 ++--- .cicd/scripts/srw_ftest.sh | 7 +-- .cicd/scripts/wrapper_srw_ftest.sh | 2 +- devbuild.sh | 7 +-- .../CustomizingTheWorkflow/ConfigWorkflow.rst | 2 +- etc/lmod-setup.csh | 2 +- etc/lmod-setup.sh | 2 +- ...gaea-c5_intel.lua => build_gaea_intel.lua} | 4 +- .../tasks/gaea-c5/plot_allvars.local.lua | 6 -- modulefiles/tasks/gaea-c5/python_srw.lua | 8 --- modulefiles/tasks/gaea/plot_allvars.local.lua | 4 ++ modulefiles/tasks/gaea/python_srw.lua | 7 +++ .../tasks/{gaea-c5 => gaea}/run_vx.local.lua | 0 .../{wflow_gaea-c5.lua => wflow_gaea.lua} | 7 +-- ...mprehensive.gaea-c5 => comprehensive.gaea} | 0 .../{coverage.gaea-c5 => coverage.gaea} | 0 tests/WE2E/setup_WE2E_tests.sh | 2 +- tests/build.sh | 2 +- ush/load_modules_wflow.sh | 7 +-- ush/machine/gaea-c5.yaml | 55 ------------------- ush/machine/gaea.yaml | 55 +++++++++++++++++++ ush/valid_param_vals.yaml | 2 +- ush/wrappers/job_cards/sbatch/get_ics.sbatch | 2 +- ush/wrappers/job_cards/sbatch/get_lbcs.sbatch | 2 +- .../job_cards/sbatch/make_grid.sbatch | 2 +- ush/wrappers/job_cards/sbatch/make_ics.sbatch | 2 +- .../job_cards/sbatch/make_lbcs.sbatch | 2 +- .../job_cards/sbatch/make_orog.sbatch | 2 +- .../job_cards/sbatch/make_sfc_climo.sbatch | 2 +- ush/wrappers/job_cards/sbatch/run_fcst.sbatch | 2 +- ush/wrappers/job_cards/sbatch/run_post.sbatch | 2 +- 31 files changed, 95 insertions(+), 117 deletions(-) rename modulefiles/{build_gaea-c5_intel.lua => build_gaea_intel.lua} (91%) delete mode 100644 modulefiles/tasks/gaea-c5/plot_allvars.local.lua delete mode 100644 modulefiles/tasks/gaea-c5/python_srw.lua create mode 100644 modulefiles/tasks/gaea/plot_allvars.local.lua create mode 100644 modulefiles/tasks/gaea/python_srw.lua rename modulefiles/tasks/{gaea-c5 => gaea}/run_vx.local.lua (100%) rename modulefiles/{wflow_gaea-c5.lua => wflow_gaea.lua} (68%) rename tests/WE2E/machine_suites/{comprehensive.gaea-c5 => comprehensive.gaea} (100%) rename tests/WE2E/machine_suites/{coverage.gaea-c5 => coverage.gaea} (100%) delete mode 100644 ush/machine/gaea-c5.yaml create mode 100644 ush/machine/gaea.yaml diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 86af5dded4..8cc95c6b00 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -10,11 +10,10 @@ pipeline { parameters { // Allow job runner to filter based on platform // Use the line below to enable all PW clusters - // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaeac5', 'hera', 'jet', 'orion', 'hercules', 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'], description: 'Specify the platform(s) to use') + // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'hera', 'jet', 'orion', 'hercules', 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'], description: 'Specify the platform(s) to use') // Use the line below to enable the PW AWS cluster - // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaeac5', 'hera', 'jet', 'orion', 'hercules', 'pclusternoaav2use1'], description: 'Specify the platform(s) to use') - // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'derecho', 'gaeac5', 'hera', 'jet', 'orion', 'hercules'], description: 
'Specify the platform(s) to use') - choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'derecho', 'hera', 'jet', 'orion', 'hercules'], description: 'Specify the platform(s) to use') + // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'hera', 'jet', 'orion', 'hercules', 'pclusternoaav2use1'], description: 'Specify the platform(s) to use') + choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'derecho', 'gaea', 'hera', 'jet', 'orion', 'hercules'], description: 'Specify the platform(s) to use') // Allow job runner to filter based on compiler choice(name: 'SRW_COMPILER_FILTER', choices: ['all', 'gnu', 'intel'], description: 'Specify the compiler(s) to use to build') booleanParam name: 'SRW_WE2E_COMPREHENSIVE_TESTS', defaultValue: false, description: 'Whether to execute the comprehensive end-to-end tests' @@ -87,8 +86,7 @@ pipeline { axes { axis { name 'SRW_PLATFORM' - // values 'derecho', 'gaeac5', 'hera', 'jet', 'orion', 'hercules' //, 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1' - values 'derecho', 'hera', 'jet', 'orion', 'hercules' //, 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1' + values 'derecho', 'gaea', 'hera', 'jet', 'orion', 'hercules' //, 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1' } axis { @@ -102,8 +100,7 @@ pipeline { exclude { axis { name 'SRW_PLATFORM' - // values 'derecho', 'gaeac5', 'jet', 'orion', 'hercules' //, 'pclusternoaav2use1' , 'azclusternoaav2eus1', 'gclusternoaav2usc1' - values 'derecho', 'jet', 'orion', 'hercules' //, 'pclusternoaav2use1' , 'azclusternoaav2eus1', 'gclusternoaav2usc1' + values 'derecho', 'gaea', 'jet', 'orion', 'hercules' //, 'pclusternoaav2use1' , 'azclusternoaav2eus1', 'gclusternoaav2usc1' } axis { diff --git a/.cicd/scripts/srw_ftest.sh b/.cicd/scripts/srw_ftest.sh index 95d5e2f936..5479e8b46d 100755 --- a/.cicd/scripts/srw_ftest.sh +++ b/.cicd/scripts/srw_ftest.sh @@ -85,12 +85,7 @@ module load build_${platform,,}_${SRW_COMPILER} module load wflow_${platform,,} [[ ${FORGIVE_CONDA} == true ]] && set +e +u # Some platforms have incomplete python3 or conda support, but wouldn't necessarily block workflow tests -# Gaea-C5 special case missing jinja2 -if [ "${platform}" == "gaea-c5" ]; then - conda activate workflow_tools -else - conda activate srw_app -fi +conda activate srw_app set -e -u # Adjust for strict limitation of stack size diff --git a/.cicd/scripts/wrapper_srw_ftest.sh b/.cicd/scripts/wrapper_srw_ftest.sh index e4afaf9e98..fabdbb63ef 100755 --- a/.cicd/scripts/wrapper_srw_ftest.sh +++ b/.cicd/scripts/wrapper_srw_ftest.sh @@ -23,7 +23,7 @@ else fi # Customize wrapper scripts -if [[ "${SRW_PLATFORM}" == gaea-c5 ]]; then +if [[ "${SRW_PLATFORM}" == gaea ]]; then sed -i '15i #SBATCH --clusters=c5' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh sed -i 's|qos=batch|qos=normal|g' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh fi diff --git a/devbuild.sh b/devbuild.sh index 9136b86e7a..05cc76312c 100755 --- a/devbuild.sh +++ b/devbuild.sh @@ -212,11 +212,6 @@ printf "PLATFORM(MACHINE)=${PLATFORM}\n" >&2 if [ "${PLATFORM}" = "wcoss2" ]; then BUILD_CONDA="off" fi -# Conda is not used on Gaea-c5 F2 filesystem -# it needs to be reevaluated when moved to F2 filesystem -if [ "${PLATFORM}" = "gaea-c5" ]; then - BUILD_CONDA="off" -fi # build conda and conda environments, if requested. 
if [ "${BUILD_CONDA}" = "on" ] ; then @@ -288,7 +283,7 @@ set -eu # automatically determine compiler if [ -z "${COMPILER}" ] ; then case ${PLATFORM} in - jet|hera|gaea-c5) COMPILER=intel ;; + jet|hera|gaea) COMPILER=intel ;; orion) COMPILER=intel ;; wcoss2) COMPILER=intel ;; cheyenne) COMPILER=intel ;; diff --git a/doc/UsersGuide/source/CustomizingTheWorkflow/ConfigWorkflow.rst b/doc/UsersGuide/source/CustomizingTheWorkflow/ConfigWorkflow.rst index b0b0301973..0c8ed8e951 100644 --- a/doc/UsersGuide/source/CustomizingTheWorkflow/ConfigWorkflow.rst +++ b/doc/UsersGuide/source/CustomizingTheWorkflow/ConfigWorkflow.rst @@ -30,7 +30,7 @@ If non-default parameters are selected for the variables in this section, they s Setting ``RUN_ENVIR`` to "community" is recommended in most cases for users who are not running in NCO's production environment. Valid values: ``"nco"`` | ``"community"`` ``MACHINE``: (Default: "BIG_COMPUTER") - The machine (a.k.a. platform or system) on which the workflow will run. Currently supported platforms are listed on the :srw-wiki:`SRW App Wiki page `. When running the SRW App on any ParallelWorks/NOAA Cloud system, use "NOAACLOUD" regardless of the underlying system (AWS, GCP, or Azure). Valid values: ``"HERA"`` | ``"ORION"`` | ``"HERCULES"`` | ``"JET"`` | ``"CHEYENNE"`` | ``"DERECHO"`` | ``"GAEA"`` | ``"GAEA-C5"`` | ``"NOAACLOUD"`` | ``"STAMPEDE"`` | ``"ODIN"`` | ``"MACOS"`` | ``"LINUX"`` | ``"SINGULARITY"`` | ``"WCOSS2"`` (Check ``ufs-srweather-app/ush/valid_param_vals.yaml`` for the most up-to-date list of supported platforms.) + The machine (a.k.a. platform or system) on which the workflow will run. Currently supported platforms are listed on the :srw-wiki:`SRW App Wiki page `. When running the SRW App on any ParallelWorks/NOAA Cloud system, use "NOAACLOUD" regardless of the underlying system (AWS, GCP, or Azure). Valid values: ``"HERA"`` | ``"ORION"`` | ``"HERCULES"`` | ``"JET"`` | ``"CHEYENNE"`` | ``"DERECHO"`` | ``"GAEA"`` | ``"NOAACLOUD"`` | ``"STAMPEDE"`` | ``"ODIN"`` | ``"MACOS"`` | ``"LINUX"`` | ``"SINGULARITY"`` | ``"WCOSS2"`` (Check ``ufs-srweather-app/ush/valid_param_vals.yaml`` for the most up-to-date list of supported platforms.) .. hint:: Users who are NOT on a named, supported Level 1 or 2 platform will need to set the ``MACHINE`` variable to ``LINUX`` or ``MACOS``. To combine use of a Linux or MacOS platform with the Rocoto workflow manager, users will also need to set ``WORKFLOW_MANAGER: "rocoto"`` in the ``platform:`` section of ``config.yaml``. This combination will assume a Slurm batch manager when generating the XML. 
diff --git a/etc/lmod-setup.csh b/etc/lmod-setup.csh index 92a4394893..af79ad8a70 100644 --- a/etc/lmod-setup.csh +++ b/etc/lmod-setup.csh @@ -37,7 +37,7 @@ else if ( "$L_MACHINE" == singularity ) then module purge -else if ( "$L_MACHINE" == gaea-c5 ) then +else if ( "$L_MACHINE" == gaea ) then module reset else if ( "$L_MACHINE" == derecho ) then diff --git a/etc/lmod-setup.sh b/etc/lmod-setup.sh index 7328dea76f..b030d2a9f5 100644 --- a/etc/lmod-setup.sh +++ b/etc/lmod-setup.sh @@ -44,7 +44,7 @@ elif [ "$L_MACHINE" = singularity ]; then module purge -elif [ "$L_MACHINE" = gaea-c5 ]; then +elif [ "$L_MACHINE" = gaea ]; then module reset elif [ "$L_MACHINE" = derecho ]; then diff --git a/modulefiles/build_gaea-c5_intel.lua b/modulefiles/build_gaea_intel.lua similarity index 91% rename from modulefiles/build_gaea-c5_intel.lua rename to modulefiles/build_gaea_intel.lua index ecf21dcc8d..9c21f685da 100644 --- a/modulefiles/build_gaea-c5_intel.lua +++ b/modulefiles/build_gaea_intel.lua @@ -5,7 +5,7 @@ the NOAA RDHPC machine Gaea C5 using Intel-2023.1.0 whatis([===[Loads libraries needed for building the UFS SRW App on Gaea C5 ]===]) -prepend_path("MODULEPATH","/ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core") +prepend_path("MODULEPATH","/ncrc/proj/epic/spack-stack/spack-stack-1.5.0/envs/unified-env/install/modulefiles/Core") stack_intel_ver=os.getenv("stack_intel_ver") or "2023.1.0" load(pathJoin("stack-intel", stack_intel_ver)) @@ -32,4 +32,4 @@ setenv("CXX","CC") setenv("CMAKE_C_COMPILER","cc") setenv("CMAKE_Fortran_COMPILER","ftn") setenv("CMAKE_CXX_COMPILER","CC") -setenv("CMAKE_Platform","gaea-c5.intel") +setenv("CMAKE_Platform","gaea.intel") diff --git a/modulefiles/tasks/gaea-c5/plot_allvars.local.lua b/modulefiles/tasks/gaea-c5/plot_allvars.local.lua deleted file mode 100644 index 624b869bdb..0000000000 --- a/modulefiles/tasks/gaea-c5/plot_allvars.local.lua +++ /dev/null @@ -1,6 +0,0 @@ -unload("miniconda3") -unload("python") -prepend_path("MODULEPATH","/ncrc/proj/epic/miniconda3/modulefiles") -load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) - -setenv("SRW_ENV", "regional_workflow") diff --git a/modulefiles/tasks/gaea-c5/python_srw.lua b/modulefiles/tasks/gaea-c5/python_srw.lua deleted file mode 100644 index b6107cc465..0000000000 --- a/modulefiles/tasks/gaea-c5/python_srw.lua +++ /dev/null @@ -1,8 +0,0 @@ -unload("miniconda3") -unload("python") -prepend_path("MODULEPATH","/ncrc/proj/epic/miniconda3/modulefiles") -load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) - -setenv("SRW_ENV", "workflow_tools") - -load("darshan-runtime/3.4.0") diff --git a/modulefiles/tasks/gaea/plot_allvars.local.lua b/modulefiles/tasks/gaea/plot_allvars.local.lua new file mode 100644 index 0000000000..104da06f5c --- /dev/null +++ b/modulefiles/tasks/gaea/plot_allvars.local.lua @@ -0,0 +1,4 @@ +unload("python") +load("conda") + +setenv("SRW_ENV", "srw_graphics") diff --git a/modulefiles/tasks/gaea/python_srw.lua b/modulefiles/tasks/gaea/python_srw.lua new file mode 100644 index 0000000000..5058b3f615 --- /dev/null +++ b/modulefiles/tasks/gaea/python_srw.lua @@ -0,0 +1,7 @@ +load("darshan-runtime/3.4.0") +unload("python") +load("conda") + +setenv("SRW_ENV", "srw_app") +setenv("LD_PRELOAD", "/opt/cray/pe/gcc/12.2.0/snos/lib64/libstdc++.so.6") + diff --git a/modulefiles/tasks/gaea-c5/run_vx.local.lua b/modulefiles/tasks/gaea/run_vx.local.lua similarity index 100% rename from modulefiles/tasks/gaea-c5/run_vx.local.lua rename to 
modulefiles/tasks/gaea/run_vx.local.lua diff --git a/modulefiles/wflow_gaea-c5.lua b/modulefiles/wflow_gaea.lua similarity index 68% rename from modulefiles/wflow_gaea-c5.lua rename to modulefiles/wflow_gaea.lua index 3073aa0522..6c24672c30 100644 --- a/modulefiles/wflow_gaea-c5.lua +++ b/modulefiles/wflow_gaea.lua @@ -6,16 +6,15 @@ the NOAA RDHPC machine Gaea C5 whatis([===[Loads libraries needed for running the UFS SRW App on gaea ]===]) unload("python") -load("set_pythonpath") -prepend_path("MODULEPATH","/ncrc/proj/epic/miniconda3/modulefiles/") -load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) prepend_path("MODULEPATH","/ncrc/proj/epic/rocoto/modulefiles/") load("rocoto") +load("conda") pushenv("MKLROOT", "/opt/intel/oneapi/mkl/2023.1.0/") +setenv("LD_PRELOAD", "/opt/cray/pe/gcc/12.2.0/snos/lib64/libstdc++.so.6") if mode() == "load" then LmodMsgRaw([===[Please do the following to activate conda: - > conda activate workflow_tools + > conda activate srw_app ]===]) end diff --git a/tests/WE2E/machine_suites/comprehensive.gaea-c5 b/tests/WE2E/machine_suites/comprehensive.gaea similarity index 100% rename from tests/WE2E/machine_suites/comprehensive.gaea-c5 rename to tests/WE2E/machine_suites/comprehensive.gaea diff --git a/tests/WE2E/machine_suites/coverage.gaea-c5 b/tests/WE2E/machine_suites/coverage.gaea similarity index 100% rename from tests/WE2E/machine_suites/coverage.gaea-c5 rename to tests/WE2E/machine_suites/coverage.gaea diff --git a/tests/WE2E/setup_WE2E_tests.sh b/tests/WE2E/setup_WE2E_tests.sh index 0644102c06..309c755966 100755 --- a/tests/WE2E/setup_WE2E_tests.sh +++ b/tests/WE2E/setup_WE2E_tests.sh @@ -45,7 +45,7 @@ function usage { } -machines=( hera jet cheyenne derecho orion wcoss2 gaea-c5 odin singularity macos noaacloud ) +machines=( hera jet cheyenne derecho orion wcoss2 gaea odin singularity macos noaacloud ) if [ "$1" = "-h" ] ; then usage ; fi [[ $# -le 2 ]] && usage diff --git a/tests/build.sh b/tests/build.sh index caf0e2b0ae..f230354a61 100755 --- a/tests/build.sh +++ b/tests/build.sh @@ -21,7 +21,7 @@ function usage() { exit 1 } -machines=( hera jet cheyenne derecho orion hercules wcoss2 gaea-c5 odin singularity macos noaacloud ) +machines=( hera jet cheyenne derecho orion hercules wcoss2 gaea odin singularity macos noaacloud ) [[ $# -gt 4 ]] && usage diff --git a/ush/load_modules_wflow.sh b/ush/load_modules_wflow.sh index cf33a43f3f..d770d7c2d9 100755 --- a/ush/load_modules_wflow.sh +++ b/ush/load_modules_wflow.sh @@ -62,12 +62,7 @@ task failed: $has_mu && set +u if [ ! 
-z $(command -v conda) ]; then -# Gaea-C5 special case missing jinja2 - if [ "${machine}" == "gaea-c5" ]; then - conda activate workflow_tools - else - conda activate srw_app - fi + conda activate srw_app fi $has_mu && set -u diff --git a/ush/machine/gaea-c5.yaml b/ush/machine/gaea-c5.yaml deleted file mode 100644 index 1f6f115495..0000000000 --- a/ush/machine/gaea-c5.yaml +++ /dev/null @@ -1,55 +0,0 @@ -platform: - WORKFLOW_MANAGER: rocoto - NCORES_PER_NODE: 128 - SCHED: slurm - TEST_CCPA_OBS_DIR: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/obs_data/ccpa/proc - TEST_MRMS_OBS_DIR: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/obs_data/mrms/proc - TEST_NDAS_OBS_DIR: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/obs_data/ndas/proc - TEST_NOHRSC_OBS_DIR: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/obs_data/nohrsc/proc - DOMAIN_PREGEN_BASEDIR: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/FV3LAM_pregen - QUEUE_DEFAULT: normal - QUEUE_FCST: normal - QUEUE_HPSS: normal - REMOVE_MEMORY: True - PARTITION_HPSS: eslogin_c5 - RUN_CMD_FCST: srun --export=ALL -n ${PE_MEMBER01} - RUN_CMD_POST: srun --export=ALL -n $nprocs - RUN_CMD_PRDGEN: srun --export=ALL -n $nprocs - RUN_CMD_SERIAL: time - RUN_CMD_UTILS: srun --export=ALL -n $nprocs - SCHED_NATIVE_CMD: --clusters=c5 --partition=batch --export=NONE - SCHED_NATIVE_CMD_HPSS: --clusters=es --partition=eslogin_c5 --export=NONE - PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' - TEST_EXTRN_MDL_SOURCE_BASEDIR: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data - TEST_PREGEN_BASEDIR: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/FV3LAM_pregen - TEST_ALT_EXTRN_MDL_SYSBASEDIR_ICS: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir - TEST_ALT_EXTRN_MDL_SYSBASEDIR_LBCS: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir - TEST_VX_FCST_INPUT_BASEDIR: '{{ "/lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/output_data/fcst_" }}{{ "ens" if (global.NUM_ENS_MEMBERS > 0) else "det" }}{{ "/{{workflow.PREDEF_GRID_NAME}}" }}{% raw %}{% endraw %}' - FIXaer: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/fix/fix_aer - FIXgsi: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/fix/fix_gsi - FIXgsm: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/fix/fix_am - FIXlut: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/fix/fix_lut - FIXorg: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/fix/fix_orog - FIXsfc: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/fix/fix_sfc_climo - FIXshp: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/NaturalEarth - EXTRN_MDL_DATA_STORES: aws -data: - ics_lbcs: - FV3GFS: - nemsio: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/FV3GFS/nemsio/${yyyymmdd}${hh} - grib2: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/FV3GFS/grib2/${yyyymmdd}${hh} - netcdf: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/FV3GFS/netcdf/${yyyymmdd}${hh} - RAP: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} - HRRR: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh} - RAP: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} - GSMGFS: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh} -rocoto: - tasks: - metatask_run_ensemble: - task_run_fcst_mem#mem#: - cores: '{{ 
task_run_fcst.PE_MEMBER01 // 1 }}' - native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' - nodes: - nnodes: - nodesize: - ppn: diff --git a/ush/machine/gaea.yaml b/ush/machine/gaea.yaml new file mode 100644 index 0000000000..1ec2ded2ef --- /dev/null +++ b/ush/machine/gaea.yaml @@ -0,0 +1,55 @@ +platform: + WORKFLOW_MANAGER: rocoto + NCORES_PER_NODE: 128 + SCHED: slurm + TEST_CCPA_OBS_DIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/obs_data/ccpa/proc + TEST_MRMS_OBS_DIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/obs_data/mrms/proc + TEST_NDAS_OBS_DIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/obs_data/ndas/proc + TEST_NOHRSC_OBS_DIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/obs_data/nohrsc/proc + DOMAIN_PREGEN_BASEDIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/FV3LAM_pregen + QUEUE_DEFAULT: normal + QUEUE_FCST: normal + QUEUE_HPSS: normal + REMOVE_MEMORY: True + PARTITION_HPSS: eslogin_c5 + RUN_CMD_FCST: srun --export=ALL -n ${PE_MEMBER01} + RUN_CMD_POST: srun --export=ALL -n $nprocs + RUN_CMD_PRDGEN: srun --export=ALL -n $nprocs + RUN_CMD_SERIAL: time + RUN_CMD_UTILS: srun --export=ALL -n $nprocs + SCHED_NATIVE_CMD: --clusters=c5 --partition=batch --export=NONE + SCHED_NATIVE_CMD_HPSS: --clusters=es --partition=eslogin_c5 --export=NONE + PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' + TEST_EXTRN_MDL_SOURCE_BASEDIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data + TEST_PREGEN_BASEDIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/FV3LAM_pregen + TEST_ALT_EXTRN_MDL_SYSBASEDIR_ICS: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir + TEST_ALT_EXTRN_MDL_SYSBASEDIR_LBCS: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir + TEST_VX_FCST_INPUT_BASEDIR: '{{ "/gpfs/f5/epic/world-shared/UFS_SRW_data/develop/output_data/fcst_" }}{{ "ens" if (global.NUM_ENS_MEMBERS > 0) else "det" }}{{ "/{{workflow.PREDEF_GRID_NAME}}" }}{% raw %}{% endraw %}' + FIXaer: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/fix/fix_aer + FIXgsi: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/fix/fix_gsi + FIXgsm: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/fix/fix_am + FIXlut: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/fix/fix_lut + FIXorg: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/fix/fix_orog + FIXsfc: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/fix/fix_sfc_climo + FIXshp: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/NaturalEarth + EXTRN_MDL_DATA_STORES: aws +data: + ics_lbcs: + FV3GFS: + nemsio: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/FV3GFS/nemsio/${yyyymmdd}${hh} + grib2: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/FV3GFS/grib2/${yyyymmdd}${hh} + netcdf: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/FV3GFS/netcdf/${yyyymmdd}${hh} + RAP: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} + HRRR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh} + RAP: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} + GSMGFS: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh} +rocoto: + tasks: + metatask_run_ensemble: + task_run_fcst_mem#mem#: + cores: '{{ task_run_fcst.PE_MEMBER01 // 1 }}' + native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' + nodes: + nnodes: + nodesize: + ppn: diff 
--git a/ush/valid_param_vals.yaml b/ush/valid_param_vals.yaml index f432c0bd76..3530b51ae9 100644 --- a/ush/valid_param_vals.yaml +++ b/ush/valid_param_vals.yaml @@ -4,7 +4,7 @@ valid_vals_RUN_ENVIR: ["nco", "community"] valid_vals_VERBOSE: [True, False] valid_vals_DEBUG: [True, False] -valid_vals_MACHINE: ["HERA", "WCOSS2", "ORION", "HERCULES", "JET", "ODIN", "CHEYENNE", "DERECHO", "STAMPEDE", "LINUX", "MACOS", "NOAACLOUD", "SINGULARITY", "GAEA-C5"] +valid_vals_MACHINE: ["HERA", "WCOSS2", "ORION", "HERCULES", "JET", "ODIN", "CHEYENNE", "DERECHO", "STAMPEDE", "LINUX", "MACOS", "NOAACLOUD", "SINGULARITY", "GAEA"] valid_vals_SCHED: ["slurm", "pbspro", "lsf", "lsfcray", "none"] valid_vals_FCST_MODEL: ["ufs-weather-model"] valid_vals_WORKFLOW_MANAGER: ["rocoto", "ecflow", "none"] diff --git a/ush/wrappers/job_cards/sbatch/get_ics.sbatch b/ush/wrappers/job_cards/sbatch/get_ics.sbatch index 5aca1c2e7f..17b6210eae 100644 --- a/ush/wrappers/job_cards/sbatch/get_ics.sbatch +++ b/ush/wrappers/job_cards/sbatch/get_ics.sbatch @@ -25,6 +25,6 @@ export ICS_OR_LBCS='ICS' $USHdir/load_modules_run_task.sh "get_extrn_ics" $JOBSdir/JREGIONAL_GET_EXTRN_MDL_FILES -# Gaea-c5 differences: +# Gaea differences: ##SBATCH --qos=normal ##SBATCH --clusters=es --partition=eslogin_c5 --export=NONE diff --git a/ush/wrappers/job_cards/sbatch/get_lbcs.sbatch b/ush/wrappers/job_cards/sbatch/get_lbcs.sbatch index fc747ece40..46a4aad45e 100644 --- a/ush/wrappers/job_cards/sbatch/get_lbcs.sbatch +++ b/ush/wrappers/job_cards/sbatch/get_lbcs.sbatch @@ -25,6 +25,6 @@ export ICS_OR_LBCS='LBCS' $USHdir/load_modules_run_task.sh "get_extrn_lbcs" $JOBSdir/JREGIONAL_GET_EXTRN_MDL_FILES -# Gaea-c5 differences: +# Gaea differences: ##SBATCH --qos=normal ##SBATCH --clusters=es --partition=eslogin_c5 --export=NONE diff --git a/ush/wrappers/job_cards/sbatch/make_grid.sbatch b/ush/wrappers/job_cards/sbatch/make_grid.sbatch index b8866af36f..4b7dbd218c 100644 --- a/ush/wrappers/job_cards/sbatch/make_grid.sbatch +++ b/ush/wrappers/job_cards/sbatch/make_grid.sbatch @@ -17,7 +17,7 @@ export JOBSdir=`grep JOBSdir $GLOBAL_VAR_DEFNS_FP | cut -d\' -f2` $USHdir/load_modules_run_task.sh "make_grid" $JOBSdir/JREGIONAL_MAKE_GRID -# Gaea-c5 differences: +# Gaea differences: ##SBATCH --qos=normal ##SBATCH --clusters=c5 --partition=batch --export=NONE #export nprocs='24' diff --git a/ush/wrappers/job_cards/sbatch/make_ics.sbatch b/ush/wrappers/job_cards/sbatch/make_ics.sbatch index 512eefeae5..729240bdbf 100644 --- a/ush/wrappers/job_cards/sbatch/make_ics.sbatch +++ b/ush/wrappers/job_cards/sbatch/make_ics.sbatch @@ -23,7 +23,7 @@ export NWGES_DIR=$PWD'/../../../nco_dirs/nwges/20190615' $USHdir/load_modules_run_task.sh "make_ics" $JOBSdir/JREGIONAL_MAKE_ICS -# Gaea-c5 differences: +# Gaea differences: ##SBATCH --qos=normal ##SBATCH --clusters=c5 --partition=batch --export=NONE #export nprocs='48' diff --git a/ush/wrappers/job_cards/sbatch/make_lbcs.sbatch b/ush/wrappers/job_cards/sbatch/make_lbcs.sbatch index ab1d1312c8..d4db098b28 100644 --- a/ush/wrappers/job_cards/sbatch/make_lbcs.sbatch +++ b/ush/wrappers/job_cards/sbatch/make_lbcs.sbatch @@ -25,7 +25,7 @@ export bcgrpnum='1' $USHdir/load_modules_run_task.sh "make_lbcs" $JOBSdir/JREGIONAL_MAKE_LBCS -# Gaea-c5 differences: +# Gaea differences: ##SBATCH --qos=normal ##SBATCH --clusters=c5 --partition=batch --export=NONE #export nprocs='48' diff --git a/ush/wrappers/job_cards/sbatch/make_orog.sbatch b/ush/wrappers/job_cards/sbatch/make_orog.sbatch index 3b440cbd42..b0c8d21e54 100644 --- 
a/ush/wrappers/job_cards/sbatch/make_orog.sbatch +++ b/ush/wrappers/job_cards/sbatch/make_orog.sbatch @@ -17,7 +17,7 @@ export JOBSdir=`grep JOBSdir $GLOBAL_VAR_DEFNS_FP | cut -d\' -f2` $USHdir/load_modules_run_task.sh "make_orog" $JOBSdir/JREGIONAL_MAKE_OROG -# Gaea-c5 differences: +# Gaea differences: ##SBATCH --qos=normal ##SBATCH --clusters=c5 --partition=batch --export=NONE #export nprocs='24' diff --git a/ush/wrappers/job_cards/sbatch/make_sfc_climo.sbatch b/ush/wrappers/job_cards/sbatch/make_sfc_climo.sbatch index b791288922..52769cb033 100644 --- a/ush/wrappers/job_cards/sbatch/make_sfc_climo.sbatch +++ b/ush/wrappers/job_cards/sbatch/make_sfc_climo.sbatch @@ -17,7 +17,7 @@ export JOBSdir=`grep JOBSdir $GLOBAL_VAR_DEFNS_FP | cut -d\' -f2` $USHdir/load_modules_run_task.sh "make_sfc_climo" $JOBSdir/JREGIONAL_MAKE_SFC_CLIMO -# Gaea-c5 differences: +# Gaea differences: ##SBATCH --qos=normal ##SBATCH --clusters=c5 --partition=batch --export=NONE #export nprocs='48' diff --git a/ush/wrappers/job_cards/sbatch/run_fcst.sbatch b/ush/wrappers/job_cards/sbatch/run_fcst.sbatch index 75abd6fc03..056fd70a28 100644 --- a/ush/wrappers/job_cards/sbatch/run_fcst.sbatch +++ b/ush/wrappers/job_cards/sbatch/run_fcst.sbatch @@ -21,7 +21,7 @@ export SLASH_ENSMEM_SUBDIR='/' $USHdir/load_modules_run_task.sh "run_fcst" $JOBSdir/JREGIONAL_RUN_FCST -# Gaea-c5 differences: +# Gaea differences: ##SBATCH --qos=normal ##SBATCH --clusters=c5 --partition=batch --export=NONE #export nprocs='48' diff --git a/ush/wrappers/job_cards/sbatch/run_post.sbatch b/ush/wrappers/job_cards/sbatch/run_post.sbatch index c2a24a7f5e..6af04693f3 100644 --- a/ush/wrappers/job_cards/sbatch/run_post.sbatch +++ b/ush/wrappers/job_cards/sbatch/run_post.sbatch @@ -26,7 +26,7 @@ for (( i=0; i<=$((num_fcst_hrs)); i++ )); do $USHdir/load_modules_run_task.sh "run_post" $JOBSdir/JREGIONAL_RUN_POST done -# Gaea-c5 differences: +# Gaea differences: ##SBATCH --qos=normal ##SBATCH --clusters=c5 --partition=batch --export=NONE From 6d6c6af4a247a52454b34e9051c79f4710fc76eb Mon Sep 17 00:00:00 2001 From: Gillian Petro <96886803+gspetro-NOAA@users.noreply.github.com> Date: Fri, 1 Mar 2024 16:45:39 -0500 Subject: [PATCH 04/42] [develop]: Add Contributor's Guide to documentation (#1046) This PR adds a Contributor's Guide to the docs alongside the User's Guide. * The Contributor's Guide includes general information on use of Git submodules in the UFS. This information can be adapted in a future PR to be more SRW-specific based on user needs/requests and any training we provide. * This PR also configures the docs so that Technical Documentation can be easily added at a later date. 
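For anyone building the documentation locally, the Sphinx entry point moves from doc/UsersGuide/source/conf.py to doc/conf.py, and the Makefile and requirements.txt move up to doc/ as well. A local build then looks roughly like the following (a sketch only; it assumes Sphinx plus the packages pinned in doc/requirements.txt are installed, and the output path is the Sphinx Makefile default):

    cd doc
    pip install -r requirements.txt
    make html    # HTML output typically lands under _build/html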
--------- Co-authored-by: RatkoVasic-NOAA <37597874+RatkoVasic-NOAA@users.noreply.github.com> --- .readthedocs.yaml | 4 +- .../code-configuration-standards.rst | 61 ++++ doc/ContribGuide/contributing.rst | 340 ++++++++++++++++++ doc/ContribGuide/documentation.rst | 72 ++++ doc/ContribGuide/git-submodules.rst | 225 ++++++++++++ doc/ContribGuide/index.rst | 13 + doc/ContribGuide/introduction.rst | 31 ++ doc/ContribGuide/testing.rst | 70 ++++ doc/INSTALL | 2 +- doc/{UsersGuide => }/Makefile | 7 +- doc/{UsersGuide => }/README | 0 .../BackgroundInfo/CCPPUpdates.rst | 0 .../BackgroundInfo/Components.rst | 2 +- .../BackgroundInfo/Introduction.rst | 45 ++- .../BackgroundInfo/TechnicalOverview.rst | 4 +- .../{source => }/BackgroundInfo/index.rst | 0 .../BuildingRunningTesting/AQM.rst | 0 .../BuildingRunningTesting/BuildSRW.rst | 0 .../ContainerQuickstart.rst | 0 .../DefaultVarsTable.rst | 0 .../BuildingRunningTesting/Quickstart.rst | 0 .../BuildingRunningTesting/RunSRW.rst | 0 .../BuildingRunningTesting/Tutorial.rst | 0 .../BuildingRunningTesting/VXCases.rst | 0 .../BuildingRunningTesting/WE2Etests.rst | 4 +- .../BuildingRunningTesting/index.rst | 0 .../CustomizingTheWorkflow/ConfigWorkflow.rst | 6 +- .../CustomizingTheWorkflow/DefineWorkflow.rst | 0 .../InputOutputFiles.rst | 9 +- .../CustomizingTheWorkflow/LAMGrids.rst | 0 .../CustomizingTheWorkflow/TemplateVars.rst | 0 .../CustomizingTheWorkflow/index.rst | 0 doc/UsersGuide/{source => }/Reference/FAQ.rst | 0 .../{source => }/Reference/Glossary.rst | 4 +- .../{source => }/Reference/RocotoInfo.rst | 0 .../{source => }/Reference/index.rst | 0 doc/UsersGuide/{source => }/SSHIntro.rst | 0 doc/UsersGuide/index.rst | 10 + .../source/_static/theme_overrides.css | 24 -- doc/UsersGuide/source/index.rst | 16 - .../source => }/_static/custom.css | 0 doc/_static/theme_overrides.css | 26 ++ .../source => }/_templates/.gitignore | 0 doc/{UsersGuide/source => }/conf.py | 90 ++--- doc/index.rst | 9 + doc/{UsersGuide => }/make.bat | 0 doc/{UsersGuide/source => }/references.bib | 0 doc/{UsersGuide => }/requirements.in | 0 doc/{UsersGuide => }/requirements.txt | 4 +- .../source => }/tables/SRW_NATLEV_table.csv | 0 .../source => }/tables/SRW_NATLEV_table.rst | 0 .../source => }/tables/SRW_PRSLEV_table.csv | 0 .../source => }/tables/SRW_PRSLEV_table.rst | 0 doc/{UsersGuide/source => }/tables/Tests.csv | 0 doc/{UsersGuide/source => }/tables/Tests.rst | 0 doc/tables/code-managers.csv | 21 ++ .../source => }/tables/fix_file_list.rst | 0 57 files changed, 974 insertions(+), 125 deletions(-) create mode 100644 doc/ContribGuide/code-configuration-standards.rst create mode 100644 doc/ContribGuide/contributing.rst create mode 100644 doc/ContribGuide/documentation.rst create mode 100644 doc/ContribGuide/git-submodules.rst create mode 100644 doc/ContribGuide/index.rst create mode 100644 doc/ContribGuide/introduction.rst create mode 100644 doc/ContribGuide/testing.rst rename doc/{UsersGuide => }/Makefile (84%) rename doc/{UsersGuide => }/README (100%) rename doc/UsersGuide/{source => }/BackgroundInfo/CCPPUpdates.rst (100%) rename doc/UsersGuide/{source => }/BackgroundInfo/Components.rst (99%) rename doc/UsersGuide/{source => }/BackgroundInfo/Introduction.rst (78%) rename doc/UsersGuide/{source => }/BackgroundInfo/TechnicalOverview.rst (94%) rename doc/UsersGuide/{source => }/BackgroundInfo/index.rst (100%) rename doc/UsersGuide/{source => }/BuildingRunningTesting/AQM.rst (100%) rename doc/UsersGuide/{source => }/BuildingRunningTesting/BuildSRW.rst (100%) rename 
doc/UsersGuide/{source => }/BuildingRunningTesting/ContainerQuickstart.rst (100%) rename doc/UsersGuide/{source => }/BuildingRunningTesting/DefaultVarsTable.rst (100%) rename doc/UsersGuide/{source => }/BuildingRunningTesting/Quickstart.rst (100%) rename doc/UsersGuide/{source => }/BuildingRunningTesting/RunSRW.rst (100%) rename doc/UsersGuide/{source => }/BuildingRunningTesting/Tutorial.rst (100%) rename doc/UsersGuide/{source => }/BuildingRunningTesting/VXCases.rst (100%) rename doc/UsersGuide/{source => }/BuildingRunningTesting/WE2Etests.rst (99%) rename doc/UsersGuide/{source => }/BuildingRunningTesting/index.rst (100%) rename doc/UsersGuide/{source => }/CustomizingTheWorkflow/ConfigWorkflow.rst (99%) rename doc/UsersGuide/{source => }/CustomizingTheWorkflow/DefineWorkflow.rst (100%) rename doc/UsersGuide/{source => }/CustomizingTheWorkflow/InputOutputFiles.rst (98%) rename doc/UsersGuide/{source => }/CustomizingTheWorkflow/LAMGrids.rst (100%) rename doc/UsersGuide/{source => }/CustomizingTheWorkflow/TemplateVars.rst (100%) rename doc/UsersGuide/{source => }/CustomizingTheWorkflow/index.rst (100%) rename doc/UsersGuide/{source => }/Reference/FAQ.rst (100%) rename doc/UsersGuide/{source => }/Reference/Glossary.rst (98%) rename doc/UsersGuide/{source => }/Reference/RocotoInfo.rst (100%) rename doc/UsersGuide/{source => }/Reference/index.rst (100%) rename doc/UsersGuide/{source => }/SSHIntro.rst (100%) create mode 100644 doc/UsersGuide/index.rst delete mode 100644 doc/UsersGuide/source/_static/theme_overrides.css delete mode 100644 doc/UsersGuide/source/index.rst rename doc/{UsersGuide/source => }/_static/custom.css (100%) create mode 100644 doc/_static/theme_overrides.css rename doc/{UsersGuide/source => }/_templates/.gitignore (100%) rename doc/{UsersGuide/source => }/conf.py (85%) create mode 100644 doc/index.rst rename doc/{UsersGuide => }/make.bat (100%) rename doc/{UsersGuide/source => }/references.bib (100%) rename doc/{UsersGuide => }/requirements.in (100%) rename doc/{UsersGuide => }/requirements.txt (97%) rename doc/{UsersGuide/source => }/tables/SRW_NATLEV_table.csv (100%) rename doc/{UsersGuide/source => }/tables/SRW_NATLEV_table.rst (100%) rename doc/{UsersGuide/source => }/tables/SRW_PRSLEV_table.csv (100%) rename doc/{UsersGuide/source => }/tables/SRW_PRSLEV_table.rst (100%) rename doc/{UsersGuide/source => }/tables/Tests.csv (100%) rename doc/{UsersGuide/source => }/tables/Tests.rst (100%) create mode 100644 doc/tables/code-managers.csv rename doc/{UsersGuide/source => }/tables/fix_file_list.rst (100%) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index b6afe96c93..c8ce6064b2 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -17,7 +17,7 @@ build: # Build documentation in the docs/ directory with Sphinx sphinx: - configuration: doc/UsersGuide/source/conf.py + configuration: doc/conf.py # If using Sphinx, optionally build your docs in additional formats such as PDF # formats: @@ -26,7 +26,7 @@ sphinx: # Optionally declare the Python requirements required to build your docs python: install: - - requirements: doc/UsersGuide/requirements.txt + - requirements: doc/requirements.txt submodules: include: diff --git a/doc/ContribGuide/code-configuration-standards.rst b/doc/ContribGuide/code-configuration-standards.rst new file mode 100644 index 0000000000..4cc043a9c2 --- /dev/null +++ b/doc/ContribGuide/code-configuration-standards.rst @@ -0,0 +1,61 @@ +================================== +Code and Configuration Standards +================================== + +General 
Policies +================== + +* Platform-specific settings should be handled only through configuration and modulefiles, not in code or scripts. +* For changes to the ``scripts``, ``ush``, or ``jobs`` directories, developers should follow the :nco:`NCO Guidelines <>` for what is incorporated into each layer. +* Developers should ensure that their contributions work with the most recent version of the ``ufs-srweather-app``, including all the specific up-to-date hashes of each subcomponent. +* Modifications should not break any existing supported capabilities on any supported platforms. +* Update the RST documentation files where appropriate as part of the PR. If necessary, contributors may update the documentation in a subsequent PR. In these cases, the contributor should :srw-repo:`open an issue ` reflecting the need for documentation and include the issue number and explanation in the Documentation section of their initial PR. +* Binary files will no longer be merged into the ``develop`` branch. A binary file is defined as a "non-text" file and can include ``*.png``, ``*.gif``, ``*.jp*g``, ``*.tiff``, ``*.tar``, ``*.tgz``, ``*.gz``, ``*.mod``, ``*.o``, and executables. If a binary file needs to be staged in the ``ufs-srweather-app`` repository, please add it to the wiki's repository. The command to clone the ``ufs-srweather-app``'s wiki repository is ``git clone https://github.com/ufs-community/ufs-srweather-app.wiki.git``. Users with write access to the wiki repository can add the files here and link them to the documentation as needed. Users who do not have write access to the wiki repository should reach out to @MichaelLueken and/or note this in their pull request so that the files can be added. + +SRW Application Guidelines +============================ + + +**General Coding Standards:** + +* The ``ufs-srweather-app`` repository must not contain source code for compiled programs. Only scripts and configuration files should reside in this repository. +* All bash scripts must explicitly be ``#!/bin/bash`` scripts. They should *not* be login-enabled (i.e., scripts should *not* use the ``-l`` flag). +* MacOS does not have all Linux utilities by default. Developers should ensure that they do not break any MacOS capabilities with their contribution. +* All code must be indented appropriately and conform to the style of existing scripts (e.g., local variables should be lowercase, global variables should be uppercase). + +**External Components** + +* All externals live in a single ``Externals.cfg`` file. +* Only a single hash will be maintained for any given external code base. All externals should point to this static hash (not to the top of a branch). +* All new entries in ``Externals.cfg`` must point only to authoritative repositories. In other words, entries must point to either a `ufs-community GitHub organization `__ repository or another NOAA project organization repository. + + * Temporary exceptions are made for a PR into the ``develop`` branch of ``ufs-srweather-app`` that is dependent on another PR. When the component PR is merged, the contributor must update the corresponding ``ufs-srweather-app`` PR with the hash of the component's authoritative repository. + +**Build System** + +* Each component must build with CMake +* Each component must build with Intel compilers on official :srw-wiki:`Level 1 ` platforms and with GNU or Intel compilers on other platforms. 
+* Each component must have a mechanism for platform independence (i.e., no hard-coded machine-specific settings outside of established environment, configuration, and modulefiles). +* Each component must build with the standard supported NCEPLIBS environment (currently `spack-stack `__). + +**Modulefiles** + +* All official platforms should have a modulefile that can be sourced to provide the appropriate Python packages and other settings for the platform. +* Each SRW component must build using the common modules located in the ``modulefiles/srw_common`` file. + + +Workflow Coding Standards +-------------------------- + +**Python Coding Standards:** + + * All new Python workflow contributions should come with an appropriate environment YAML file (similar to ``environment.yaml``, ``graphics_environment.yaml``, and ``aqm_environment.yaml``). + * Keep the use of external Python packages to a minimum for necessary workflow tasks. Currently, these include ``f90nml``, ``pyyaml``, and ``Jinja2``. + +**Workflow Design:** Follow the :nco:`NCO Guidelines <>` for what is incorporated in each layer of the workflow. This is particularly important in the ``scripts`` directory. + +**Management of the Configuration File:** New configurable options must be consistent with existing configurable options and be documented in :srw-repo:`UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst `. Add necessary checks on acceptable options where applicable. Add appropriate default values in ``config_defaults.yaml``. + +**Management of Template Files:** If a new configurable option is required in an existing template, it must be handled similarly to its counterparts in the scripts that fill in the template. For example, if a new type of namelist is introduced for a new application component, it should make use of the existing ``jinja`` framework for populating namelist settings. + +**Namelist Management:** Namelists in ``ufs-srweather-app`` are generated using a Python tool and managed by setting YAML configuration parameters. This allows for the management of multiple configuration settings with maximum flexibility and minimum duplication of information. \ No newline at end of file diff --git a/doc/ContribGuide/contributing.rst b/doc/ContribGuide/contributing.rst new file mode 100644 index 0000000000..ed1671363e --- /dev/null +++ b/doc/ContribGuide/contributing.rst @@ -0,0 +1,340 @@ +============================ +Contributing to the SRW App +============================ + +Fork and PR Overview +===================== + +.. note:: + + Thank you to the Unified Workflow (UW) team for allowing us to adapt their Fork and PR Model overview for use in the SRW App. The original can be viewed in the `uwtools` :uw:`documentation `. + +Contributions to the ``ufs-srweather-app`` project are made via a :github-docs:`Fork` and :github-docs:`Pull Request (PR)` model. GitHub provides a thorough description of this contribution model in their `Contributing to a project` :github-docs:`Quickstart`, but the steps, with respect to ``ufs-srweather-app`` contributions, can be summarized as: + +#. :github-docs:`Create an issue ` to document proposed changes. +#. :github-docs:`Fork` the :srw-repo:`ufs-srweather-app repository<>` into your personal GitHub account. +#. :github-docs:`Clone` your fork onto your development system. +#. :github-docs:`Create a branch` in your clone for your changes. All development should take place on a branch, *not* on ``develop``. +#. :github-docs:`Make, commit, and push changes` in your clone / to your fork. 
+#. When your work is complete, :github-docs:`create a pull request (PR)` to merge your changes.
+
+For future contributions, you may delete and then recreate your fork, or configure the official ``ufs-srweather-app`` repository as a :github-docs:`remote repository` on your clone and :github-docs:`sync upstream changes` to stay up-to-date with the official repository.
+
+
+Development and Testing Process
+=================================
+
+#. **Create issue:** Open an :srw-repo:`issue ` in the ``ufs-srweather-app`` repository to document proposed changes. See :ref:`Opening an Issue ` for detailed instructions.
+#. **Fork & Clone the SRW App:** :github-docs:`Fork` the :srw-repo:`ufs-srweather-app repository<>` into your personal GitHub account and :github-docs:`clone` your fork onto your development system if you have not already done so.
+#. **Create a branch:** Create a branch in your clone for your changes. All development should take place on a branch, not on ``develop``. Branches should be named as follows, where ``[name]`` is a one-word description of the branch:
+
+   * ``bugfix/[name]``: Fixes a demonstrably incorrect portion of code
+   * ``feature/[name]``: Adds a new feature to the code or improves an existing portion of the code
+   * ``text/[name]``: Changes elements of the repository that do not impact the compiled code in any way (e.g., changes to README, documentation, comments, changing quoted Registry elements, white space alignment).
+
+#. **Development:** Perform and test changes in the feature branch (not on ``develop``!). Document work in the issue and mention the issue number in commit messages to link your work to the issue (e.g., ``commit -m "Issue #23 - "``). Document changes to the workflow and capabilities in the RST files so that the SRW App documentation stays up-to-date.
+#. **Testing:** Test code modifications on as many platforms as possible, and request help with further testing from the code management team when unable to test on all Level 1 platforms. The bare minimum testing required before opening a PR is to run the fundamental (:srw-repo:`tests/WE2E/machine_suites/fundamental `) tests on at least one supported machine (additional testing from the comprehensive suite might be required, depending on the nature of the change). To run the fundamental tests manually, please use the following command in the ``tests/WE2E`` directory:
+
+   .. code-block:: console
+
+      ./run_WE2E_tests.py -t=fundamental -m=your_machine -a=your_account
+
+   where ``your_machine`` is the Tier-1 machine you are running the tests on, and ``your_account`` is the account you charge your computational resources to. See :numref:`Section %s ` for more detail on SRW App testing.
+
+#. **Pull Request:** When your work is complete, :github-docs:`create a pull request` to merge your changes. When a PR is initiated, the :ref:`PR template ` autofills. Developers should use the template to provide information about the PR in the proper fields. See the guidelines in the :ref:`Making a Pull Request ` section for more details on making a good pull request.
+#. **Merge:** When review and testing are complete, a code manager will merge the PR into ``develop``. PRs that are not ready for merging should have a "Work in Progress" label on them. Users who lack the permissions required to add the label can request in their PR that a code manager do so.
+#. **Cleanup:** After the PR is merged, the code developer should delete the branch on their fork and close the issue. Feature branches are intended to be short-lived, concentrated on code with one sole purpose, and applicable to a single PR. A new feature branch should be created when subsequent code development continues.
+
+.. note::
+
+   Communication with code managers and the :ref:`repository code management team ` throughout the process is encouraged.
+
+.. _open-issue:
+
+Opening an Issue
+=================
+
+All changes to ``ufs-srweather-app`` should be associated with a :srw-repo:`GitHub Issue `. Developers should search the existing issues in the ``ufs-srweather-app`` repository before beginning their work. If an issue does not exist for the work they are doing, they should create one prior to opening a new pull request. If an issue does exist, developers should be sure to collaborate on it to avoid duplicative work.
+
+To open an issue, click on :srw-repo:`"New Issue"` within the ``ufs-srweather-app`` GitHub repository.
+
+Choose from four options:
+
+#. :srw-repo:`Bug Report `: Report specific problems ("bugs") in the code using the following template:
+
+   .. code-block:: console
+
+      Your bug may already be reported!
+      Please search on the [Issue tracker](https://github.com/ufs-community/ufs-srweather-app/issues) before creating a new issue.
+      If an issue already exists, please use that issue to add any additional information.
+
+      ## Expected behavior
+
+      ## Current behavior
+
+      ## Machines affected
+
+      ## Steps To Reproduce
+
+      ## Detailed Description of Fix (optional)
+
+      ## Additional Information (optional)
+
+      ## Possible Implementation (optional)
+
+      ## Output (optional)
+
+#. :srw-repo:`Feature Request `: New features and feature enhancements fall under this category. Propose features and enhancements using the following template. Optional sections may be deleted.
+
+   .. code-block:: console
+
+      Your issue may already be reported!
+      Please search on the [Issue tracker](https://github.com/ufs-community/ufs-srweather-app/issues) before creating a new issue. If an issue already exists, please use that issue to add any additional information.
+
+      ## Description
+
+      ## Solution
+
+      ## Requirements
+
+      ## Acceptance Criteria (Definition of Done)
+
+      ## Dependencies (optional)
+
+      ## Alternative Solutions (optional)
+
+#. :srw-repo:`Text-Only Changes `: Propose text-only changes using the "Text-only request" template. Optional sections may be deleted.
+
+   .. code-block:: console
+
+      ## Description
+
+      ## Solution
+
+      ## Alternatives (optional)
+
+      ## Related to (optional)
+
+#. :srw-repo:`Other `: Open a blank issue, and use the "Feature Request" template above as a starting point to describe the issue.
+
+For all issue reports, indicate whether this is:
+
+   #. A problem that you plan to work on and submit a PR for
+   #. A problem that you will **not** work on but that requires attention
+   #. A suggested improvement
+
+Additionally, please add a priority label to the issue (low, medium, or high priority); a command-line sketch for adding a label follows the list below. If you are unable to add labels to your issues, please request that a code manager add a priority label for you.
+
+   * **High priority:** Issues related to a bug fix, a failing test configuration, or an update required for a release (either an operational implementation or public release).
+   * **Medium priority:** New features that are not required immediately for either an implementation or release.
+   * **Low priority:** Refactoring work or other work that does not rise to medium or high priority.
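+
+For contributors who do have label permissions, a priority label can also be applied from the command line with the `GitHub CLI <https://cli.github.com/>`__. The following is a minimal sketch (the issue number is hypothetical); the GitHub web interface works just as well:
+
+.. code-block:: console
+
+   # Add a priority label to an existing issue (1234 is a hypothetical issue number)
+   gh issue edit 1234 --repo ufs-community/ufs-srweather-app --add-label "high priority"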
+
+If you are unable to work on the issue and require assistance through :term:`EPIC`, please make sure to include the ``EPIC Support Requested`` label. If the ``EPIC Support Requested`` label is added to a ``high priority`` issue, it may take some time before EPIC begins work on the issue, since EPIC management needs to account for and prioritize these requests. Once it is clear that EPIC support is needed on a high-priority issue, however, management will allocate the necessary resources to assist. After filling out the issue report, click on "Submit new issue."
+
+
+.. _make-pr:
+
+Making a Pull Request
+======================
+
+All changes to the SRW App ``develop`` branch should be handled via GitHub’s "Pull Request" (PR) functionality from a branch in the developer's fork. When creating your PR, please follow these guidelines, specific to the ``ufs-srweather-app`` project:
+
+* Ensure that your PR is targeting the base repository ``ufs-community/ufs-srweather-app`` and an appropriate base branch (usually ``develop``).
+* **Complete the PR template.** Your PR will appear pre-populated with a :ref:`template ` that you should complete. Provide an informative synopsis of your contribution, then mark appropriate checklist items by placing an "X" between their square brackets. You may tidy up the description by removing boilerplate text and non-selected checklist items. View :ref:`useful PR template guidance ` and information on :ref:`best practices ` for completing each section below.
+* **Create a draft PR.** Use the pull-down arrow on the green button below the description to initially create a :github-docs:`draft pull request`.
+
+   * Once your draft PR is open, visit its *Files changed* tab and add comments to any lines of code where you think reviewers will benefit from more explanation. Try to save time by proactively answering questions you suspect reviewers will ask.
+
+* **Open the PR.** Once your draft PR is marked up with your comments and ready for review, return to the *Conversation* tab and click the *Ready for review* button.
+
+   * A default set of reviewers will automatically be added to your PR. You may add or request others if appropriate. Pull requests will be reviewed and approved by at least two code reviewers, at least one of whom must be a code manager. Reviewers may make comments, ask questions, or request changes on your PR. Respond to these as needed, making commits in your clone and pushing to your fork/branch. Your PR will automatically be updated when commits are pushed to its source branch in your fork, so reviewers will immediately see your updates. When a PR has met the contribution and testing requirements and has been approved by two code reviewers, a code manager will merge the PR.
+
+.. _pr-template:
+
+PR Template
+------------
+
+Here is the template that is provided when developers click "Create pull request":
+
+.. code-block:: console
+
+   - Update develop to head at ufs-community
+
+   - Use this template to give a detailed message describing the change you want to make to the code.
+
+   - You may delete any sections labeled "optional" and any instructions within .
+
+   - If you are unclear on what should be written here, see https://github.com/wrf-model/WRF/wiki/Making-a-good-pull-request-message for some guidance and review the Code Contributor's Guide at https://github.com/ufs-community/ufs-srweather-app/wiki/Code-Manager's-Guide.
+ + - Code reviewers will assess the PR based on the criteria laid out in the Code Reviewer's Guide (https://github.com/ufs-community/ufs-srweather-app/wiki/Code-Manager's-Guide). + + - The title of this pull request should be a brief summary (ideally less than 100 characters) of the changes included in this PR. Please also include the branch to which this PR is being issued (e.g., "[develop]: Updated UFS_UTILS hash"). + + - Use the "Preview" tab to see what your PR will look like when you hit "Create pull request" + + + # --- Delete this line and those above before hitting "Create pull request" --- + + ## DESCRIPTION OF CHANGES: + + + ### Type of change + + - [ ] Bug fix (non-breaking change which fixes an issue) + - [ ] New feature (non-breaking change which adds functionality) + - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) + - [ ] This change requires a documentation update + + ## TESTS CONDUCTED: + + + + - [ ] hera.intel + - [ ] orion.intel + - [ ] hercules.intel + - [ ] cheyenne.intel + - [ ] cheyenne.gnu + - [ ] derecho.intel + - [ ] gaea.intel + - [ ] gaeac5.intel + - [ ] jet.intel + - [ ] wcoss2.intel + - [ ] NOAA Cloud (indicate which platform) + - [ ] Jenkins + - [ ] fundamental test suite + - [ ] comprehensive tests (specify *which* if a subset was used) + + ## DEPENDENCIES: + + + ## DOCUMENTATION: + + + ## ISSUE: + + + ## CHECKLIST + + - [ ] My code follows the style guidelines in the Contributor's Guide + - [ ] I have performed a self-review of my own code using the Code Reviewer's Guide + - [ ] I have commented my code, particularly in hard-to-understand areas + - [ ] My changes need updates to the documentation. I have made corresponding changes to the documentation + - [ ] My changes do not require updates to the documentation (explain). + - [ ] My changes generate no new warnings + - [ ] New and existing tests pass with my changes + - [ ] Any dependent changes have been merged and published + + ## LABELS (optional): + + A Code Manager needs to add the following labels to this PR: + - [ ] Work In Progress + - [ ] bug + - [ ] enhancement + - [ ] documentation + - [ ] release + - [ ] high priority + - [ ] run_ci + - [ ] run_we2e_fundamental_tests + - [ ] run_we2e_comprehensive_tests + - [ ] Needs Cheyenne test + - [ ] Needs Jet test + - [ ] Needs Hera test + - [ ] Needs Orion test + - [ ] help wanted + + ## CONTRIBUTORS (optional): + + +.. _pr-template-guidance: + +PR Template Guidance +--------------------- + +**TITLE:** Titles should start with the branch name in brackets and should give code reviewers a clear idea of what the change will do in approximately 5-10 words. Some good examples: + + * [develop] Make thompson_mynn_lam3km ccpp suite available + * [release/public-v2] Add a build_linux_compiler modulefile + * [develop] Fix module loads on Hera + * [develop] Add support for Rocoto with generic LINUX platform + +All of the above examples concisely describe the changes contained in the pull request. The title will not get cut off in emails and web pages. In contrast, here are some made-up (but plausible) examples of BAD pull request titles: + + * Bug fixes (Bug fixes on what part of the code?) + * Changes to surface scheme (What kind of changes? Which surface scheme?) + +**DESCRIPTION OF CHANGES:** The first line of the description should be a single-line "purpose" for this change. Note the type of change (i.e., bug fix, feature/enhancement, text-only). 
Summarize the problem, proposed solution, and required changes. If this is an enhancement or new feature, describe why the change is important.
+
+**DOCUMENTATION:** Developers should include documentation on new capabilities and enhancements by updating the appropriate RST documentation files in their fork prior to opening the PR. These documentation updates should be noted in the "Documentation" section of the PR message. If necessary, contributors may submit the RST documentation in a subsequent PR. In these cases, developers should include any existing documentation in the "Documentation" section of the initial PR message or as a file attachment to the PR. Then, the contributor should open an issue reflecting the need for official RST documentation updates and include the issue number and explanation in the "Documentation" section of the initial PR template.
+
+.. _tips-best-practices:
+
+Tips, Best Practices, and Protocols to Follow When Issuing a PR
+-----------------------------------------------------------------
+
+* **Label PR status appropriately.** If the PR is not completely ready to be merged, please add a "Work In Progress" label. Urgent PRs should be marked "high priority." All PRs should have a type label (e.g., "bug," "enhancement"). Labels can be added on the right-hand side of a submitted PR request by clicking on the gear icon beside "Labels" (below the list of reviewers). If users do not have the permissions to add a label to their PR, they should request in their PR description that a code manager add the appropriate labels.
+* **Indicate urgency.** If a PR is particularly urgent, this information should be provided in the PR "Description" section, and multiple code management team members should be tagged to draw attention to this PR. After submitting the PR, a "high priority" label should be added to it.
+* **Indicate the scope of the PR.** If the PR is extremely minor (e.g., a change to the README file), indicate this in the PR message. If it is an extensive PR, the developer should test it on as many platforms as possible and stress the necessity of testing on systems to which they do not have access.
+* **Clarify in the PR message where the code has been tested.** At a minimum, code should be tested on the platform where code modification has taken place. It should also be tested on machines where code modifications will impact results. If the developer does not have access to these platforms, this should be noted in the PR.
+* **Follow separation of concerns.** For example, module loads are only handled in the appropriate modulefiles, Rocoto always sets the work directory, j-jobs make the work directory, and ex-scripts require the work directory to exist.
+* **Target subject matter experts (SMEs) among the code management team.** When possible, tag team members who are familiar with the modifications made in the PR so that the code management team can provide effective and streamlined PR reviews and approvals. Developers can tag SMEs by selecting the gear icon next to "Assignees" (under the Reviewers list) and adding the appropriate names.
+* **Schedule a live code review** if the PR is exceptionally complex, in order to brief members of the code management team on the PR either in person or through a teleconference. Developers should indicate in the PR message that they are interested in a live code review if they believe that it would be beneficial.
+
+Merging
+========
+
+Your PR is ready to merge when:
+
+#. It has been approved by the required number of ``ufs-srweather-app`` reviewers, including at least one code manager.
+#. All conversations have been marked as resolved.
+#. All required checks have passed.
+
+These criteria and their current statuses are detailed in a section at the bottom of your PR's *Conversation* tab. Checks take some time to run, so please be patient.
+
+In general, the lead code manager will merge the PR when ready. Developers with write permissions should not merge their code themselves unless instructed otherwise by the lead code manager.
+
+Need Help?
+===========
+
+See the :ref:`User Support ` section for an overview of user support options. For assistance directly related to a PR, please use comments in the *Conversation* tab of your PR to ask for help with any difficulties you encounter!
diff --git a/doc/ContribGuide/documentation.rst b/doc/ContribGuide/documentation.rst
new file mode 100644
index 0000000000..9e0bad6bda
--- /dev/null
+++ b/doc/ContribGuide/documentation.rst
@@ -0,0 +1,72 @@
+.. _doc-guidelines:
+
+Documentation
+=============
+
+.. note::
+
+   Thank you to the Unified Workflow (UW) team for allowing us to adapt their documentation guidance for use in the SRW App. The original can be viewed in the `uwtools` :uw:`documentation `.
+
+
+Locally Building and Previewing Documentation
+---------------------------------------------
+
+To build the docs locally:
+
+#. Install ``sphinx``, ``sphinx-rtd-theme``, and ``sphinxcontrib-bibtex`` on your system if they are not already installed.
+#. From the root of your clone: ``cd doc``
+#. Build the docs: ``make doc``
+
+The ``make doc`` command will build the documentation under ``doc/build/html``, after which you can preview the docs in your web browser at the URL:
+
+.. code-block:: text
+
+   file:///doc/build/html/index.html
+
+Rerun ``make doc`` and refresh your browser after making and saving changes.
+
+Viewing Online Documentation
+----------------------------
+
+Online documentation generation and hosting for the SRW App are provided by :rtd:`Read the Docs<>`. The green *View Docs* button near the upper right of that page links to the official docs for the project. When viewing the documentation, the version selector at the bottom of the navigation column on the left can be used to switch between the latest development code (``develop``), the latest released version (``latest``), and any previously released version.
+
+Docs are also built and temporarily published when Pull Requests (PRs) targeting the ``develop`` branch are opened. Visit the :rtd:`Builds page` to see recent builds, including those made for PRs. Click a PR-related build marked *Passed*, then the small *View docs* link (**not** the large green *View Docs* button) to see the docs built specifically for that PR. If your PR includes documentation updates, it may be helpful to include the URL of this build in your PR's description so that reviewers can see the rendered HTML docs and not just the modified ``.rst`` files. Note that if commits are pushed to the PR's source branch, Read the Docs will rebuild the PR docs. See the checks section near the bottom of a PR for current status and for another link to the PR docs via the *Details* link.
+
+.. COMMENT: Technically, docs are built when any PR is opened, regardless of branch. Look into changing.
+
+Documentation Guidelines
+------------------------
+
+Please follow these guidelines when contributing to the documentation:
+
+* Keep formatting consistent across pages. When a better approach is discovered, update all similar content; otherwise, follow the conventions established in existing content.
+* Ensure that the ``make doc`` command completes with no errors or warnings.
+* If the link-check portion of ``make doc`` reports that a URL is ``permanently`` redirected, update the link in the docs to use the new URL. Non-permanent redirects can be left as-is.
+* Do not manually wrap lines in the ``.rst`` files. Insert newlines only as needed to achieve correctly formatted HTML, and let HTML wrap long lines and/or provide a scrollbar.
+* Use one blank line between documentation elements (headers, paragraphs, code blocks, etc.) unless additional lines are necessary to achieve correctly formatted HTML.
+* Remove all trailing whitespace.
+* In general, avoid pronouns like "we" and "you". (Using "we" may be appropriate when synonymous with "The SRW Code Management Team", "The UFS Community", etc., when the context is clear.) Prefer direct, factual statements about what the code does, requires, etc.
+* Use the `Oxford Comma `__.
+* Follow the :rst:`RST Sections` guidelines, underlining section headings with = characters, subsections with - characters, and subsubsections with ^ characters. If a further level of refinement is needed, use " to underline paragraph headers.
+* In [[sub]sub]section titles, capitalize all "principal" words. In practice this usually means all words but articles (a, an, the), logicals (and, etc.), and prepositions (for, of, etc.). Always fully capitalize acronyms (e.g., YAML).
+* Never capitalize proper names when their owners do not (e.g., write `"pandas" `__, not "Pandas", even at the start of a sentence) or when referring to a software artifact (e.g., write ``numpy`` when referring to the library, and "NumPy" when referring to the project).
+* When referring to YAML constructs, ``block`` refers to an entry whose value is a nested collection of key/value pairs, while ``entry`` is a single key/value pair.
+* When using the ``.. code-block::`` directive, align the actual code with the word ``code``. Also, when ``.. code-block::`` directives appear in bulleted or numbered lists, align them with the text following the space to the right of the bullet/number. Include a blank line prior to the code content. For example:
+
+   .. code-block:: text
+
+      * Lorem ipsum
+
+        .. code-block:: python
+
+           n = 88
+
+   or
+
+   .. code-block:: text
+
+      #. Lorem ipsum
+
+         .. code-block:: python
+
+            n = 88
\ No newline at end of file
diff --git a/doc/ContribGuide/git-submodules.rst b/doc/ContribGuide/git-submodules.rst
new file mode 100644
index 0000000000..42e7616cfe
--- /dev/null
+++ b/doc/ContribGuide/git-submodules.rst
@@ -0,0 +1,225 @@
+============================
+Working with Git Submodules
+============================
+
+.. note::
+
+   Thank you to Janet Derrico (@jderrico-noaa) [#f1]_ for authoring the summary of Git submodules on which this chapter is based. [#f2]_ It has been adapted slightly for use in the SRW App.
+
+What Are Git Submodules?
+=========================
+
+Git submodules are pointers to other Git repositories. They enable developers to include an external repository as a subdirectory within their main project. This is particularly useful when a project depends on external libraries or components that are developed and maintained in separate repositories.
+
+Key Benefits
+=============
+
+* **Version Control:** Submodules link to specific commits in external repositories, ensuring consistency and predictability. Developers can control exactly which version of an external repository their project depends on.
+* **Separate Development:** Changes to submodules are tracked separately from the main repository, allowing for independent development of external dependencies.
+* **Collaborative Workflows:** Multiple teams can work on different parts of a larger project simultaneously without interference, each with its own repository (e.g., changes to ``ccpp-physics`` can be developed at the same time as changes to ``ufs-weather-model``).
+
+How Submodules Are Linked
+==========================
+
+Git knows which submodules to check out based on two key pieces of information: the submodule pointer and where to find that pointer. The pointer is a commit reference---when you add a submodule to your repository, Git doesn't just store the URL; it also records a specific commit hash from that submodule. The commit hash is what Git uses to know which exact state of the submodule to check out. These commit references are stored in the main repository and are updated whenever a change is committed in the submodule. When you run ``git submodule update``, Git checks out the commit of each submodule according to what is recorded in the main repository. The ``.gitmodules`` file tracks where to find this information, storing the submodule's path within your repository and its corresponding URL.
+
+If you commit a hash in a submodule but push to a different fork, then Git will add the new submodule hash to the supermodule, which will result in a Git error when trying to recursively check out the supermodule.
+
+Adding a Submodule
+===================
+
+You can add a submodule to your repository using ``git submodule add <URL> <path>``. This clones the external repository to the specified path and adds a new entry in a special file named ``.gitmodules``.
+
+Cloning a Repository with Submodules
+=====================================
+
+When cloning a repository that has submodules, use ``git clone --recursive`` to ensure that all submodules are also cloned.
+
+Updating a Submodule
+======================
+
+To update a submodule, navigate into the submodule directory, check out the desired commit or branch, and then go back to the main repository to commit this change. Here is an example for making a change to ``ccpp-physics``, ``fv3``, and ``ufs-weather-model``. Since ``ccpp-physics`` is a submodule of ``fv3atm``, which is itself a submodule of ``ufs-weather-model``, a change to ``ccpp-physics`` requires PRs to all three repositories.
+This method requires two remotes on your local workspace: the authoritative repository (e.g., ``ufs-community/ufs-weather-model``) and the personal fork you push to (e.g., ``jderrico-noaa/ufs-weather-model``). The steps involved are:
+
+#. Clone locally
+#. Create your working branches
+#. Commit your changes
+#. Push your working branches to your personal fork
+#. Submit PRs from your personal fork to the authoritative repository
+
+Cloning the Authoritative Repository and Adding Your Personal Fork
+--------------------------------------------------------------------
+
+Clone the authoritative repository to your local workspace:
+
+.. code-block:: console
+
+   git clone --recursive -b branch-name https://github.com/ufs-community/ufs-weather-model
+   cd ufs-weather-model
+
+where ``branch-name`` is the name of the branch you want to clone (usually ``develop``).
+
+Adding Your Personal Fork as a Remote Repository
+--------------------------------------------------
+
+.. code-block:: console
+
+   git remote add my-fork https://github.com/<your-username>/ufs-weather-model
+
+where ``my-fork`` is the name of your fork and ``<your-username>`` is your GitHub account name. You can name your fork whatever you want as long as you can distinguish it from the authoritative repository (e.g., ``janet``).
+
+Run:
+
+.. code-block:: console
+
+   git remote -v
+
+to show the remote repositories that have been added to your local copy of ``ufs-weather-model``. It should show ``origin`` (the authoritative ufs-community repository) and ``my-fork`` (the personal fork that you push changes to).
+The local repository for ``ufs-weather-model`` has now been created. This process is repeated for the submodules where the code will be modified (``fv3atm`` and ``ccpp-physics``):
+
+.. code-block:: console
+
+   cd FV3
+   git remote add my-fork https://github.com/<your-username>/fv3atm
+   cd ccpp/physics
+   git remote add my-fork https://github.com/<your-username>/ccpp-physics
+
+Create Working Branches
+------------------------
+
+The next step is to create working branches that will hold your changes until they are merged. It is good practice to check out the main branch (e.g., ``develop``) first, to ensure that you are working with the latest updates, and then create your working branch; you will do this at every level, from the supermodule all the way down.
+
+Navigate from ``ccpp/physics`` back to ``ufs-weather-model`` and create a new branch to hold your changes:
+
+.. code-block:: console
+
+   cd ../../..
+   git checkout -b working_branch
+
+This command creates a new branch named ``working_branch``; in practice, the branch name should be more descriptive and reflect the development it will hold. Follow the same process for the Git submodules you will be working in:
+
+.. code-block:: console
+
+   cd FV3
+   git checkout develop
+   git checkout -b working_branch
+   cd ccpp/physics
+   git checkout ufs/dev
+   git checkout -b working_branch
+
+Commit Changes and Push Working Branches
+------------------------------------------
+
+As you make changes to the code, you should commit often. This ensures that all of your development is tracked (so you don't lose anything) and makes it easier to go back to a working version if one of your changes breaks things (it happens!). Commit messages should be descriptive of the changes they contain.
+
+To push your working branches to your fork from the top down, navigate to the ``ufs-weather-model`` directory. Then run:
+
+.. code-block:: console
+
+   git push -u my-fork working_branch
+
+The ``-u`` flag here tells Git to set ``my-fork/working_branch`` as the default remote branch for ``working_branch``. After executing this command, you can simply use ``git push`` or ``git pull`` while on ``working_branch``, and Git will automatically know to push or pull from ``my-fork/working_branch``.
+
+Continue this process with the other submodule repositories:
+
+.. code-block:: console
+
+   cd FV3
+   git push -u my-fork working_branch
+   cd ccpp/physics
+   git push -u my-fork working_branch
+
+All working changes are now in your personal fork.
+
+Submitting PRs
+---------------
+
+When working with Git submodules, developers must submit individual pull requests to each repository where changes were made and link them to each other. In this case, developers would submit PRs to ``ufs-weather-model``, ``fv3atm``, and ``ccpp-physics``. There are several steps to this process: opening the PR, updating the submodules, and creating new submodule pointers. Each authoritative repository should have its own PR template that includes space to link to the URLs of related PRs.
If for some reason this is not the case, developers should link to the related PRs in the "Description" section of their PR.
+
+Updating the Submodules
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+When changes are made to the authoritative repositories while you are developing or while your PR is open, you need to update the PR to include those updates. From your local workspace, navigate to ``ufs-weather-model`` and run:
+
+.. code-block:: console
+
+   git checkout develop
+   git pull origin develop
+   git checkout working_branch
+   git merge develop
+   git push -u my-fork working_branch
+
+This will check out the ``develop`` branch, retrieve the latest updates, then check out ``working_branch`` and merge the latest changes from ``develop`` into it. After pushing the changes on ``working_branch`` to your personal fork, your PR will update automatically. This process must then be repeated for the other components (e.g., ``fv3`` and ``ccpp-physics``). It is important to check that you are merging the correct branch---for example, the main development branch in ``ufs-community/ccpp-physics`` is ``ufs/dev``, so you would check out and pull ``ufs/dev`` instead.
+
+.. note::
+
+   If you have already pushed ``working_branch`` to ``my-fork`` using the ``-u`` flag, you can omit the flag and fork specification, but it doesn't hurt to use them.
+
+Add Submodule Pointers
+^^^^^^^^^^^^^^^^^^^^^^^
+
+To create submodule pointers, developers work from the lowest submodule directory upward (rather than from the top down), linking each submodule to its supermodule. In this example, we are using *ufs-weather-model → fv3 → ccpp-physics*, so developers would start by navigating to ``ccpp-physics``. Once your PR to ``ccpp-physics`` is merged, you then need to update your PRs to ``fv3`` and ``ufs-weather-model`` so that they point to the updated ``ccpp-physics`` submodule.
+
+First, update the local copy of ``ccpp-physics`` with what was merged to the authoritative repository (i.e., your changes):
+
+.. code-block:: console
+
+   git checkout ufs/dev
+   git pull origin ufs/dev
+
+Then navigate to ``fv3atm``:
+
+.. code-block:: console
+
+   cd ../..
+
+If you were working with other submodules, you would navigate here to the submodule directly above the lowest one. Then create the submodule pointer, commit the change, and push it to your fork of ``fv3atm``:
+
+.. code-block:: console
+
+   git checkout working_branch
+   git add ccpp/physics
+   git commit -m "update submodule pointer for ccpp-physics"
+   git push -u my-fork working_branch
+
+Once again, pushing to your personal fork will automatically update the PR that includes ``working_branch``.
+
+The ``fv3atm`` code managers will then merge your ``fv3atm`` PR, at which point only the ``ufs-weather-model`` PR will require a submodule pointer update. From your local workspace, navigate to the ``fv3`` directory (``ufs-weather-model/FV3``) and update the local copy of ``fv3atm`` with what was just merged into the authoritative repository:
+
+.. code-block:: console
+
+   git checkout develop
+   git pull origin develop
+
+Then, navigate up to the ``ufs-weather-model`` directory, check out the working branch, and add the submodule pointer for ``fv3atm``. Commit and push the changes to your personal fork:
+
+.. code-block:: console
+
+   cd ..
+   git checkout working_branch
+   git add FV3
+   git commit -m "update submodule pointer for fv3atm"
+   git push -u my-fork working_branch
+
+The UFS code managers will then test and merge the ``ufs-weather-model`` PR.
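+
+Before opening or updating these PRs, it can help to confirm that the supermodule actually records the submodule commits you expect. The following is a minimal sketch using standard Git commands, run from the supermodule directory:
+
+.. code-block:: console
+
+   # List the commit recorded for each submodule; a leading "+" marks a
+   # checked-out commit that differs from the one the supermodule records
+   git submodule status --recursive
+
+   # Summarize submodule pointer changes on the current branch
+   git diff --submodule=log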
+
+Switching Branches With Submodules
+====================================
+
+If you are working off a branch that has different versions (or commit references/pointers) of submodules, it is important to synchronize the submodules correctly. From the supermodule, you would switch to your desired branch and then update the submodules. For example, if you want to work on a different branch of the ``ufs-weather-model`` repository:
+
+.. code-block:: console
+
+   git checkout desired_branch
+   git submodule update --init --recursive
+
+Here, ``--init`` initializes any submodules that have not yet been initialized, while ``--recursive`` ensures that all nested submodules (e.g., ``fv3atm``) are updated. If you know there have been upstream changes to a submodule, and you want to incorporate these latest changes, you would go into each submodule directory and pull the changes:
+
+.. code-block:: console
+
+   cd path/to/submodule
+   git pull origin
+
+When working with submodules, it is best practice to always run ``git submodule update --init --recursive`` after switching branches. Changes to submodules need to be committed and pushed separately within their respective repositories (see the sections above).
+
+.. [#f1] of NOAA Global Systems Laboratory (GSL) and Cooperative Institute for Research in Environmental Sciences (CIRES)
+.. [#f2] with the assistance of Grant Firl, Joseph Olson, and ChatGPT
\ No newline at end of file
diff --git a/doc/ContribGuide/index.rst b/doc/ContribGuide/index.rst
new file mode 100644
index 0000000000..c4adfc997d
--- /dev/null
+++ b/doc/ContribGuide/index.rst
@@ -0,0 +1,13 @@
+Contributor's Guide
+======================
+
+.. toctree::
+   :maxdepth: 3
+
+   introduction
+   contributing
+   code-configuration-standards
+   testing
+   git-submodules
+   documentation
+
diff --git a/doc/ContribGuide/introduction.rst b/doc/ContribGuide/introduction.rst
new file mode 100644
index 0000000000..687dc2ea25
--- /dev/null
+++ b/doc/ContribGuide/introduction.rst
@@ -0,0 +1,31 @@
+=================
+Introduction
+=================
+
+Background
+============
+
+Authoritative Branch
+----------------------
+
+The ``ufs-srweather-app`` repository maintains a main branch for development called ``develop``. The HEAD of ``develop`` reflects the latest development changes. It points to regularly updated hashes for individual subcomponents. Pull requests (PRs) are typically merged to ``develop``.
+
+The ``develop`` branch is protected by the code management team:
+
+   #. Pull requests for this branch require approval by at least two code reviewers.
+   #. A code manager should perform at least one of the reviews and the merge, but other contributors are welcome to provide comments/suggestions.
+
+.. _rcm-team:
+
+Repository Code Management Team
+---------------------------------
+
+Scientists and engineers from multiple labs and organizations have volunteered to review pull requests for the ``develop`` branch:
+
+.. csv-table::
+   :file: ../tables/code-managers.csv
+   :widths: auto
+   :delim: ;
+   :header-rows: 1
+
+
diff --git a/doc/ContribGuide/testing.rst b/doc/ContribGuide/testing.rst
new file mode 100644
index 0000000000..b296a3f90a
--- /dev/null
+++ b/doc/ContribGuide/testing.rst
@@ -0,0 +1,70 @@
+.. _pr-testing:
+
+========
+Testing
+========
+
+The ``ufs-srweather-app`` repository uses the established workflow end-to-end (WE2E) testing framework (see :ref:`WE2E tests `) to implement two tiers of testing: fundamental and comprehensive. *Fundamental testing* consists of a lightweight set of tests that can be automated and run regularly on each :srw-wiki:`Level 1 ` platform. These tests verify that there are no major, obvious faults in the underlying code when running common combinations of grids, input data, and physics suites. *Comprehensive testing* includes the entire set of WE2E tests and covers a broader range of capabilities, configurations, and components. Eventually, new categories of tests will be added, including regression tests and unit tests.
+
+Before opening a PR, a minimum set of tests should be run:
+
+   * Developers must run the fundamental test suite manually on at least one supported platform and report on the outcome in the PR template. Developers should test code modifications on as many platforms as possible.
+
+      * To run the fundamental tests manually, run the following command from the ``tests/WE2E`` directory:
+
+        .. code-block:: console
+
+           ./run_WE2E_tests.py -t=fundamental -m=your_machine -a=your_account
+
+        where ``your_machine`` is the Tier-1 machine you are running the tests on, and ``your_account`` is the account you charge your computational resources to. Refer to the :ref:`WE2E Tests ` chapter of the User's Guide for more detail on how to run SRW App tests.
+
+   * Developers will not be required to run tests on *all* supported platforms, but if a failure is pointed out by another reviewer (or by automated testing), it is expected that the developer will work with reviewers and code managers to ensure that the problem is resolved prior to merging.
+
+   * If the PR impacts functionality contained within comprehensive WE2E tests not included in the fundamental test suite, the developer must run those tests for the PR.
+   * Any new functionality must be tested explicitly, and any new tests should be described in detail in the PR message. Depending on the impact of this functionality, new tests should be added to the suite of comprehensive WE2E tests, followed by a discussion with code managers on whether they should also be included as fundamental tests.
+
+      * In some cases, it may be possible to modify a current test instead of creating a completely new test. Code developers introducing new capabilities should work with code managers to provide the proper configuration files, data, and other information necessary to create new tests for these capabilities.
+
+   * When the above tests are complete and the PR has been approved by at least one code manager, a code manager will add the ``run_we2e_coverage_tests`` label to initiate fundamental testing on all Level 1 platforms via the Jenkins CI/CD pipeline.
+
+Testing on Jenkins
+===================
+
+`Jenkins `__ is an "open source automation server" that automates code testing. Note that **ONLY** code managers should apply the Jenkins automated testing labels, and only after at least one code manager has approved the PR. The PR will not be merged until all Jenkins-based builds and testing have successfully passed.
+
+The following automated testing labels are available for the SRW App:
+
+   * ``run_we2e_coverage_tests``
+   * *Coming Soon:* ``run_we2e_comprehensive_tests``
+
+Due to a security issue on Jenkins, where all Jenkins usernames are exposed, access to Jenkins logs through the Jenkins API has been disabled for the public. However, users can visit the `EPIC Health Dashboard `__ and click the *Jenkins Artifacts* tab to access the log files for their PR.
On that page, users can identify their PR number, pull the ``we2e_test_logs-{machine}-{compiler}.tgz`` file (where ``{machine}`` is the Tier-1 platform that failed and ``{compiler}`` is the compiler used for the failed test), untar and ungzip the file, and look through the logs from the test that failed. + +Additionally, users can potentially access the directories where the Jenkins tests are run on the various machines so that they can view the tests, monitor progress, and investigate failures. The locations of the experiment directories on the various machines are as follows: + +.. list-table:: + :header-rows: 1 + + * - Tier-1 Platform + - Location of Jenkins experiment directories + * - Derecho + - /glade/derecho/scratch/epicufsrt/jenkins/workspace + * - Gaea + - /lustre/f2/dev/wpo/role.epic/jenkins/workspace/fs-srweather-app_pipeline_PR-#/gaea + * - Gaea C5 + - /lustre/f2/dev/wpo/role.epic/jenkins/workspace/fs-srweather-app_pipeline_PR-#/gaea-c5 + * - Hera (Intel) + - /scratch2/NAGAPE/epic/role.epic/jenkins/workspace/fs-srweather-app_pipeline_PR-#__2/hera + * - Hera (GNU) + - /scratch2/NAGAPE/epic/role.epic/jenkins/workspace/fs-srweather-app_pipeline_PR-#/hera + * - Hercules + - /work/noaa/epic/role-epic/jenkins/workspace/fs-srweather-app_pipeline_PR-#/hercules + * - Jet + - /lfs1/NAGAPE/epic/role.epic/jenkins/workspace/fs-srweather-app_pipeline_PR-#/jet + * - Orion + - /work/noaa/epic/role-epic/jenkins/workspace/fs-srweather-app_pipeline_PR-#/orion + +where ``#`` is the PR number. + +If the Jenkins tests fail, then the developer will need to make the necessary corrections to their PR. Unfortunately, removing and adding the label back will not kick off the Jenkins test again. Instead, the job will need to be manually re-run through Jenkins (by a member of the EPIC team). + + diff --git a/doc/INSTALL b/doc/INSTALL index e53044f6ad..53dc159bbd 100644 --- a/doc/INSTALL +++ b/doc/INSTALL @@ -12,7 +12,7 @@ git clone https://github.com/ufs-community/ufs-srweather-app.git cd ufs-srweather-app/ ./manage_externals/checkout_externals -# We can build ufs-sreweather-app binaries in two ways. +# We can build ufs-srweather-app binaries in two ways. # Method 1 # ======== diff --git a/doc/UsersGuide/Makefile b/doc/Makefile similarity index 84% rename from doc/UsersGuide/Makefile rename to doc/Makefile index 84c77bbfa2..c91f2f147b 100644 --- a/doc/UsersGuide/Makefile +++ b/doc/Makefile @@ -2,7 +2,7 @@ SPHINXOPTS = -a -n #-W SPHINXBUILD = sphinx-build -SOURCEDIR = source +SOURCEDIR = . BUILDDIR = build LINKCHECKDIR = $(BUILDDIR)/linkcheck @@ -12,12 +12,13 @@ LINKCHECKDIR = $(BUILDDIR)/linkcheck help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) -docs: +doc: + make clean $(MAKE) linkcheck $(MAKE) html linkcheck: - make clean && $(SPHINXBUILD) -b linkcheck $(SPHINXOPTS) $(SOURCEDIR) $(LINKCHECKDIR) + $(SPHINXBUILD) -b linkcheck $(SPHINXOPTS) $(SOURCEDIR) $(LINKCHECKDIR) # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
diff --git a/doc/UsersGuide/README b/doc/README similarity index 100% rename from doc/UsersGuide/README rename to doc/README diff --git a/doc/UsersGuide/source/BackgroundInfo/CCPPUpdates.rst b/doc/UsersGuide/BackgroundInfo/CCPPUpdates.rst similarity index 100% rename from doc/UsersGuide/source/BackgroundInfo/CCPPUpdates.rst rename to doc/UsersGuide/BackgroundInfo/CCPPUpdates.rst diff --git a/doc/UsersGuide/source/BackgroundInfo/Components.rst b/doc/UsersGuide/BackgroundInfo/Components.rst similarity index 99% rename from doc/UsersGuide/source/BackgroundInfo/Components.rst rename to doc/UsersGuide/BackgroundInfo/Components.rst index d861304502..1ba9349d8d 100644 --- a/doc/UsersGuide/source/BackgroundInfo/Components.rst +++ b/doc/UsersGuide/BackgroundInfo/Components.rst @@ -89,7 +89,7 @@ For more information on NEXUS, visit the GitHub repository at https://github.com Unified Workflow Tools ======================== -The Unified Workflow (UW) is a set of tools intended to unify the workflow for various UFS applications under one framework. The UW toolkit currently includes templater and configuration (config) tools, which have been incorporated into the SRW App workflow and will soon be incorporated into other UFS repositories. Additional tools are under development. More details about UW tools can be found in the `uwtools `__ GitHub repository and in the :doc:`UW Documentation `. +The Unified Workflow (UW) is a set of tools intended to unify the workflow for various UFS applications under one framework. The UW toolkit currently includes templater and configuration (config) tools, which have been incorporated into the SRW App workflow and will soon be incorporated into other UFS repositories. Additional tools are under development. More details about UW tools can be found in the `uwtools `__ GitHub repository and in the :uw:`UW Documentation <>`. Build System and Workflow ========================= diff --git a/doc/UsersGuide/source/BackgroundInfo/Introduction.rst b/doc/UsersGuide/BackgroundInfo/Introduction.rst similarity index 78% rename from doc/UsersGuide/source/BackgroundInfo/Introduction.rst rename to doc/UsersGuide/BackgroundInfo/Introduction.rst index 4c6379e295..f1a384e025 100644 --- a/doc/UsersGuide/source/BackgroundInfo/Introduction.rst +++ b/doc/UsersGuide/BackgroundInfo/Introduction.rst @@ -11,28 +11,30 @@ The UFS includes `multiple applications `__) and support for the ``RRFS_NA_13km`` predefined grid - * Addition of ``FV3_GFS_v17_p8`` physics suite (`PR #574 `__) + * Addition of the supported ``FV3_RAP`` physics suite (:srw-repo:`PR #811 `) and support for the ``RRFS_NA_13km`` predefined grid + * Addition of ``FV3_GFS_v17_p8`` physics suite (:srw-repo:`PR #574 `) * Updates to :term:`CCPP` that target the top of the ``main`` branch (which is ahead of CCPP v6.0.0). See :ref:`this page ` for a detailed summary of updates that came in ahead of the v2.2.0 release. 
- * Expansion of :srw-wiki:`Level 1 platforms ` to include Derecho, Hercules, and Gaea C5 (PRs `#894 `__, `#898 `__, `#911 `__)
- * Transition to spack-stack modulefiles for most supported platforms to align with the UFS WM shift to spack-stack (PRs `#913 `__ and `#941 `__)
- * Overhaul of the WE2E testing suite (see, e.g., PRs `#686 `__, `#732 `__, `#864 `__, `#871 `__)
- * Improvements to the CI/CD automated testing pipeline (see, e.g., PRs `#707 `__ and `#847 `__)
- * Incorporation of additional METplus verification capabilities (PRs `#552 `__, `#614 `__, `#757 `__, `#853 `__)
- * Integration of the Unified Workflow's templater tool (`PR #793 `__)
- * Ability to create a user-defined custom workflow (`PR #676 `__)
- * Option to use a custom vertical coordinate file with different distribution of vertical layers (`PR #813 `__) and :ref:`documentation on how to use this feature ` (`PR #888 `__)
- * Incorporation of plotting tasks into the workflow (PR `#482 `__); addition of ability to plot on both CONUS and smaller regional grid (`PR #560 `__)
- * Addition of a sample verification case (`PR #500 `__) with :ref:`documentation `
- * A new :ref:`tutorial chapter ` in the documentation (`PR #584 `__)
- * Incorporation of `UFS Case Studies `__ within the WE2E framework (PRs `#736 `__ and `#822 `__)
- * Air Quality Modeling (AQM) capabilities (unsupported but available; see `PR #613 `__)
+ * Expansion of :srw-wiki:`Level 1 platforms ` to include Derecho, Hercules, and Gaea C5 (PRs :srw-repo:`#894 `, :srw-repo:`#898 `, :srw-repo:`#911 `)
+ * Transition to spack-stack modulefiles for most supported platforms to align with the UFS WM shift to spack-stack (PRs :srw-repo:`#913 ` and :srw-repo:`#941 `)
+ * Overhaul of the WE2E testing suite (see, e.g., PRs :srw-repo:`#686 `, :srw-repo:`#732 `, :srw-repo:`#864 `, :srw-repo:`#871 `)
+ * Improvements to the CI/CD automated testing pipeline (see, e.g., PRs :srw-repo:`#707 ` and :srw-repo:`#847 `)
+ * Incorporation of additional METplus verification capabilities (PRs :srw-repo:`#552 `, :srw-repo:`#614 `, :srw-repo:`#757 `, :srw-repo:`#853 `)
+ * Integration of the Unified Workflow's templater tool (:srw-repo:`PR #793 `)
+ * Ability to create a user-defined custom workflow (:srw-repo:`PR #676 `)
+ * Option to use a custom vertical coordinate file with different distribution of vertical layers (:srw-repo:`PR #813 `) and :ref:`documentation on how to use this feature ` (:srw-repo:`PR #888 `)
+ * Incorporation of plotting tasks into the workflow (PR :srw-repo:`#482 `); addition of ability to plot on both CONUS and smaller regional grid (:srw-repo:`PR #560 `)
+ * Addition of a sample verification case (:srw-repo:`PR #500 `) with :ref:`documentation `
+ * A new :ref:`tutorial chapter ` in the documentation (:srw-repo:`PR #584 `)
+ * Incorporation of `UFS Case Studies `__ within the WE2E framework (PRs :srw-repo:`#736 ` and :srw-repo:`#822 `)
+ * Air Quality Modeling (AQM) capabilities (unsupported but available; see :srw-repo:`PR #613 `)
  * Miscellaneous documentation updates to reflect the changes above
 
 The SRW App |latestr| citation is as follows and should be used when presenting results based on research conducted with the App:
 
 UFS Development Team. (2023, Oct. 31). Unified Forecast System (UFS) Short-Range Weather (SRW) Application (Version v2.2.0). Zenodo. https://doi.org/10.5281/zenodo.10015544
 
+.. _ug-organization:
 
 User's Guide Organization
 ============================
 
@@ -80,6 +82,7 @@ Reference Information
 
 * :numref:`Section %s: FAQ ` answers users' frequently asked questions.
 * :numref:`Section %s: Glossary ` defines important terms related to the SRW App.
 
+.. _doc-conventions:
 
 SRW App Documentation Conventions
 ===================================
@@ -96,6 +99,8 @@ Code that includes angle brackets (e.g., ``build__``) indica
 File or directory paths that begin with ``/path/to/`` should be replaced with the actual path on the user's system. For example, ``/path/to/modulefiles`` might be replaced by ``/Users/Jane.Smith/ufs-srweather-app/modulefiles``.
 
+.. _component-docs:
+
 Component Documentation
 =========================
 
@@ -142,8 +147,10 @@ A list of available component documentation is shown in :numref:`Table %s `
 
-If users (especially new users) believe they have identified a bug in the system, it is recommended that they first ask about the problem in `GitHub Discussions `__, since many "bugs" do not require a code change/fix --- instead, the user may be unfamiliar with the system and/or may have misunderstood some component of the system or the instructions, which is causing the problem. Asking for assistance in a `GitHub Discussion `__ post can help clarify whether there is a simple adjustment to fix the problem or whether there is a genuine bug in the code. Users are also encouraged to search `open issues `__ to see if their bug has already been identified. If there is a genuine bug, and there is no open issue to address it, users can report the bug by filing a `GitHub Issue `__.
+If users (especially new users) believe they have identified a bug in the system, it is recommended that they first ask about the problem in :srw-repo:`GitHub Discussions `, since many "bugs" do not require a code change/fix --- instead, the user may be unfamiliar with the system and/or may have misunderstood some component of the system or the instructions, which is causing the problem. Asking for assistance in a :srw-repo:`GitHub Discussion ` post can help clarify whether there is a simple adjustment to fix the problem or whether there is a genuine bug in the code. Users are also encouraged to search :srw-repo:`open issues ` to see if their bug has already been identified. If there is a genuine bug, and there is no open issue to address it, users can report the bug by filing a :srw-repo:`GitHub Issue `.
 
 Feature Requests and Enhancements
 -----------------------------------
@@ -178,6 +185,8 @@ utilities, model code, and infrastructure. As described above, users can post is
 Contributions to the `ufs-srweather-app `__ repository should follow the guidelines contained in the :srw-wiki:`SRW App Contributor's Guide `. Additionally, users can file issues in component repositories for contributions that directly concern those repositories. For code to be accepted into a component repository, users must follow the code management rules of that component's authoritative repository. These rules are usually outlined in the component's User's Guide (see :numref:`Table %s `) or GitHub wiki for each respective repository (see :numref:`Table %s `).
 
+.. _future-direction:
+
 Future Direction
 =================
 
@@ -189,4 +198,4 @@ Users can expect to see incremental improvements and additional capabilities in
 
 * Incorporation of additional `Unified Workflow `__ tools.
 
-.. bibliography:: ../references.bib
+.. bibliography:: ../../references.bib
diff --git a/doc/UsersGuide/source/BackgroundInfo/TechnicalOverview.rst b/doc/UsersGuide/BackgroundInfo/TechnicalOverview.rst
similarity index 94%
rename from doc/UsersGuide/source/BackgroundInfo/TechnicalOverview.rst
rename to doc/UsersGuide/BackgroundInfo/TechnicalOverview.rst
index b2a1819670..52365a86e5 100644
--- a/doc/UsersGuide/source/BackgroundInfo/TechnicalOverview.rst
+++ b/doc/UsersGuide/BackgroundInfo/TechnicalOverview.rst
@@ -138,7 +138,7 @@ The UFS Weather Model contains a number of sub-repositories, which are documente
 Repository Structure
 ----------------------
 
-The ``ufs-srweather-app`` :term:`umbrella repository` is an NCO-compliant repository. Its structure follows the standards laid out in :term:`NCEP` Central Operations (NCO) WCOSS `Implementation Standards `__. This structure is implemented using the ``local_path`` settings contained within the ``Externals.cfg`` file. After ``manage_externals/checkout_externals`` is run (see :numref:`Section %s `), the specific GitHub repositories described in :numref:`Table %s ` are cloned into the target subdirectories shown below. Directories that will be created as part of the build process appear in parentheses and will not be visible until after the build is complete. Some directories have been removed for brevity.
+The ``ufs-srweather-app`` :term:`umbrella repository` is an NCO-compliant repository. Its structure follows the standards laid out in :term:`NCEP` Central Operations (NCO) WCOSS :nco:`Implementation Standards `. This structure is implemented using the ``local_path`` settings contained within the ``Externals.cfg`` file. After ``manage_externals/checkout_externals`` is run (see :numref:`Section %s `), the specific GitHub repositories described in :numref:`Table %s ` are cloned into the target subdirectories shown below. Directories that will be created as part of the build process appear in parentheses and will not be visible until after the build is complete. Some directories have been removed for brevity.
 
 .. code-block:: console
 
@@ -193,7 +193,7 @@ The ``ufs-srweather-app`` :term:`umbrella repository` is an NCO-compliant reposi
 SRW App SubDirectories
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-:numref:`Table %s ` describes the contents of the most important SRW App subdirectories. :numref:`Table %s ` provides a more comprehensive explanation of the ``ufs-srweather-app`` files and subdirectories. Users can reference the `NCO Implementation Standards `__ (p. 19) for additional details on repository structure in NCO-compliant repositories.
+:numref:`Table %s ` describes the contents of the most important SRW App subdirectories. :numref:`Table %s ` provides a more comprehensive explanation of the ``ufs-srweather-app`` files and subdirectories. Users can reference the :nco:`NCO Implementation Standards ` (p. 19) for additional details on repository structure in NCO-compliant repositories.
 
 .. _Subdirectories:
_Subdirectories: diff --git a/doc/UsersGuide/source/BackgroundInfo/index.rst b/doc/UsersGuide/BackgroundInfo/index.rst similarity index 100% rename from doc/UsersGuide/source/BackgroundInfo/index.rst rename to doc/UsersGuide/BackgroundInfo/index.rst diff --git a/doc/UsersGuide/source/BuildingRunningTesting/AQM.rst b/doc/UsersGuide/BuildingRunningTesting/AQM.rst similarity index 100% rename from doc/UsersGuide/source/BuildingRunningTesting/AQM.rst rename to doc/UsersGuide/BuildingRunningTesting/AQM.rst diff --git a/doc/UsersGuide/source/BuildingRunningTesting/BuildSRW.rst b/doc/UsersGuide/BuildingRunningTesting/BuildSRW.rst similarity index 100% rename from doc/UsersGuide/source/BuildingRunningTesting/BuildSRW.rst rename to doc/UsersGuide/BuildingRunningTesting/BuildSRW.rst diff --git a/doc/UsersGuide/source/BuildingRunningTesting/ContainerQuickstart.rst b/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst similarity index 100% rename from doc/UsersGuide/source/BuildingRunningTesting/ContainerQuickstart.rst rename to doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst diff --git a/doc/UsersGuide/source/BuildingRunningTesting/DefaultVarsTable.rst b/doc/UsersGuide/BuildingRunningTesting/DefaultVarsTable.rst similarity index 100% rename from doc/UsersGuide/source/BuildingRunningTesting/DefaultVarsTable.rst rename to doc/UsersGuide/BuildingRunningTesting/DefaultVarsTable.rst diff --git a/doc/UsersGuide/source/BuildingRunningTesting/Quickstart.rst b/doc/UsersGuide/BuildingRunningTesting/Quickstart.rst similarity index 100% rename from doc/UsersGuide/source/BuildingRunningTesting/Quickstart.rst rename to doc/UsersGuide/BuildingRunningTesting/Quickstart.rst diff --git a/doc/UsersGuide/source/BuildingRunningTesting/RunSRW.rst b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst similarity index 100% rename from doc/UsersGuide/source/BuildingRunningTesting/RunSRW.rst rename to doc/UsersGuide/BuildingRunningTesting/RunSRW.rst diff --git a/doc/UsersGuide/source/BuildingRunningTesting/Tutorial.rst b/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst similarity index 100% rename from doc/UsersGuide/source/BuildingRunningTesting/Tutorial.rst rename to doc/UsersGuide/BuildingRunningTesting/Tutorial.rst diff --git a/doc/UsersGuide/source/BuildingRunningTesting/VXCases.rst b/doc/UsersGuide/BuildingRunningTesting/VXCases.rst similarity index 100% rename from doc/UsersGuide/source/BuildingRunningTesting/VXCases.rst rename to doc/UsersGuide/BuildingRunningTesting/VXCases.rst diff --git a/doc/UsersGuide/source/BuildingRunningTesting/WE2Etests.rst b/doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst similarity index 99% rename from doc/UsersGuide/source/BuildingRunningTesting/WE2Etests.rst rename to doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst index 031038d1d7..4fca53b575 100644 --- a/doc/UsersGuide/source/BuildingRunningTesting/WE2Etests.rst +++ b/doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst @@ -32,7 +32,7 @@ WE2E Test Categories WE2E tests are grouped into two categories that are of interest to code developers: ``fundamental`` and ``comprehensive`` tests. "Fundamental" tests are a lightweight but wide-reaching set of tests designed to function as a cheap "`smoke test `__" for changes to the UFS SRW App. The fundamental suite of tests runs common combinations of workflow tasks, physical domains, input data, physics suites, etc. 
The comprehensive suite of tests covers a broader range of combinations of capabilities, configurations, and components, ideally including all capabilities that *can* be run on a given platform. Because some capabilities are not available on all platforms (e.g., retrieving data directly from NOAA HPSS), the suite of comprehensive tests varies from machine to machine. -The list of fundamental and comprehensive tests can be viewed in the ``ufs-srweather-app/tests/WE2E/machine_suites/`` directory, and the tests are described in more detail in :doc:`this table <../tables/Tests>`. +The list of fundamental and comprehensive tests can be viewed in the ``ufs-srweather-app/tests/WE2E/machine_suites/`` directory, and the tests are described in more detail in :doc:`this table <../../tables/Tests>`. .. note:: @@ -169,7 +169,7 @@ The script to run the WE2E tests is named ``run_WE2E_tests.py`` and is located i .. note:: - The full list of WE2E tests is extensive, and some larger, high-resolution tests are computationally expensive. Estimates of walltime and core-hour cost for each test are provided in :doc:`this table <../tables/Tests>`. + The full list of WE2E tests is extensive, and some larger, high-resolution tests are computationally expensive. Estimates of walltime and core-hour cost for each test are provided in :doc:`this table <../../tables/Tests>`. Using the Test Script ---------------------- diff --git a/doc/UsersGuide/source/BuildingRunningTesting/index.rst b/doc/UsersGuide/BuildingRunningTesting/index.rst similarity index 100% rename from doc/UsersGuide/source/BuildingRunningTesting/index.rst rename to doc/UsersGuide/BuildingRunningTesting/index.rst diff --git a/doc/UsersGuide/source/CustomizingTheWorkflow/ConfigWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst similarity index 99% rename from doc/UsersGuide/source/CustomizingTheWorkflow/ConfigWorkflow.rst rename to doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst index 0c8ed8e951..10a0bcc4eb 100644 --- a/doc/UsersGuide/source/CustomizingTheWorkflow/ConfigWorkflow.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst @@ -20,7 +20,7 @@ USER Configuration Parameters If non-default parameters are selected for the variables in this section, they should be added to the ``user:`` section of the ``config.yaml`` file. ``RUN_ENVIR``: (Default: "nco") - This variable determines the workflow mode. The user can choose between two options: "nco" and "community". The "nco" mode uses a directory structure that mimics what is used in operations at NOAA/NCEP Central Operations (NCO) and at the NOAA/NCEP/Environmental Modeling Center (EMC), which works with NCO on pre-implementation testing. Specifics of the conventions used in "nco" mode can be found in the following `WCOSS Implementation Standards `__ document: + This variable determines the workflow mode. The user can choose between two options: "nco" and "community". The "nco" mode uses a directory structure that mimics what is used in operations at NOAA/NCEP Central Operations (NCO) and at the NOAA/NCEP/Environmental Modeling Center (EMC), which works with NCO on pre-implementation testing. Specifics of the conventions used in "nco" mode can be found in the following :nco:`WCOSS Implementation Standards <>` document: | NCEP Central Operations | WCOSS Implementation Standards @@ -699,7 +699,7 @@ A standard set of environment variables has been established for *nco* mode to s Only *community* mode is fully supported for releases. 
*nco* mode is used by those at the Environmental Modeling Center (EMC) and Global Systems Laboratory (GSL) who are working on pre-implementation operational testing. Other users should run the SRW App in *community* mode. ``envir_default, NET_default, model_ver_default, RUN_default``: - Standard environment variables defined in the NCEP Central Operations WCOSS Implementation Standards document. These variables are used in forming the path to various directories containing input, output, and workflow files. The variables are defined in the `WCOSS Implementation Standards `__ document (pp. 4-5) as follows: + Standard environment variables defined in the NCEP Central Operations WCOSS Implementation Standards document. These variables are used in forming the path to various directories containing input, output, and workflow files. The variables are defined in the :nco:`WCOSS Implementation Standards ` document (pp. 4-5) as follows: ``envir_default``: (Default: "para") Set to "test" during the initial testing phase, "para" when running in parallel (on a schedule), and "prod" in production. @@ -1349,7 +1349,7 @@ PLOT_ALLVARS Configuration Parameters Typically, the following parameters must be set explicitly by the user in the ``task_plot_allvars:`` section of the configuration file (``config.yaml``) when executing the plotting tasks. ``COMOUT_REF``: (Default: "") - Path to the reference experiment's COMOUT directory. This is the directory where the GRIB2 files from post-processing are located. In *community* mode (i.e., when ``RUN_ENVIR: "community"``), this directory will correspond to the location in the experiment directory where the post-processed output can be found (e.g., ``$EXPTDIR/$DATE_FIRST_CYCL/postprd``). In *nco* mode, this directory should be set to the location of the ``COMOUT`` directory and end with ``$PDY/$cyc``. For more detail on *nco* standards and directory naming conventions, see `WCOSS Implementation Standards `__ (particularly pp. 4-5). + Path to the reference experiment's COMOUT directory. This is the directory where the GRIB2 files from post-processing are located. In *community* mode (i.e., when ``RUN_ENVIR: "community"``), this directory will correspond to the location in the experiment directory where the post-processed output can be found (e.g., ``$EXPTDIR/$DATE_FIRST_CYCL/postprd``). In *nco* mode, this directory should be set to the location of the ``COMOUT`` directory and end with ``$PDY/$cyc``. For more detail on *nco* standards and directory naming conventions, see :nco:`WCOSS Implementation Standards ` (particularly pp. 4-5). ``PLOT_FCST_START``: (Default: 0) The starting forecast hour for the plotting task. For example, if a forecast starts at 18h/18z, this is considered the 0th forecast hour, so "starting forecast hour" should be 0, not 18. If a forecast starts at 18h/18z, but the user only wants plots from the 6th forecast hour on, "starting forecast hour" should be 6. 
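To make the hour-counting convention above concrete, the sketch below shows how a plotting window can be derived from these settings. It is illustrative only (not part of this change set), and it assumes the companion settings ``PLOT_FCST_INC`` and ``PLOT_FCST_END``, which do not appear in this hunk:

.. code-block:: python

   # Minimal sketch: which forecast hours the plotting task would process.
   # Hours are counted from the start of the forecast (hour 0), not from the
   # cycle's clock time, so an 18z cycle still begins at forecast hour 0.
   # PLOT_FCST_INC and PLOT_FCST_END are assumed companion settings here.
   def plot_hours(plot_fcst_start, plot_fcst_end, plot_fcst_inc=1):
       return list(range(plot_fcst_start, plot_fcst_end + 1, plot_fcst_inc))

   # A 12-hour forecast from the 18z cycle, plotted from forecast hour 6 on:
   print(plot_hours(6, 12, 3))  # [6, 9, 12]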
diff --git a/doc/UsersGuide/source/CustomizingTheWorkflow/DefineWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/DefineWorkflow.rst similarity index 100% rename from doc/UsersGuide/source/CustomizingTheWorkflow/DefineWorkflow.rst rename to doc/UsersGuide/CustomizingTheWorkflow/DefineWorkflow.rst diff --git a/doc/UsersGuide/source/CustomizingTheWorkflow/InputOutputFiles.rst b/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst similarity index 98% rename from doc/UsersGuide/source/CustomizingTheWorkflow/InputOutputFiles.rst rename to doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst index d8266e74e8..128b080655 100644 --- a/doc/UsersGuide/source/CustomizingTheWorkflow/InputOutputFiles.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst @@ -168,8 +168,8 @@ If users wish to modify the fields or levels that are output from the UPP, they This process requires advanced knowledge of which fields can be output for the UFS Weather Model. UPP Product Output Tables for the UFS SRW LAM Grid: - * :doc:`3D Native Hybrid Level Fields <../tables/SRW_NATLEV_table>` - * :doc:`3D Pressure Level Fields <../tables/SRW_PRSLEV_table>` + * :doc:`3D Native Hybrid Level Fields <../../tables/SRW_NATLEV_table>` + * :doc:`3D Pressure Level Fields <../../tables/SRW_PRSLEV_table>` Use the instructions in the `UPP User's Guide `__ to make modifications to the ``fv3lam.xml`` file and to remake the flat text file, called ``postxconfig-NT-fv3lam.txt`` (default), that the UPP reads. @@ -364,11 +364,6 @@ Google Cloud: * HRRR: https://console.cloud.google.com/marketplace/product/noaa-public/hrrr -FTP Data Repository (data for SRW Release v1.0.0 & v1.0.1): - -* https://ftp.emc.ncep.noaa.gov/EIB/UFS/SRW/v1p0/fix/ -* https://ftp.emc.ncep.noaa.gov/EIB/UFS/SRW/v1p0/simple_test_case/ - Others: * University of Utah HRRR archive: https://home.chpc.utah.edu/~u0553130/Brian_Blaylock/cgi-bin/hrrr_download.cgi diff --git a/doc/UsersGuide/source/CustomizingTheWorkflow/LAMGrids.rst b/doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst similarity index 100% rename from doc/UsersGuide/source/CustomizingTheWorkflow/LAMGrids.rst rename to doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst diff --git a/doc/UsersGuide/source/CustomizingTheWorkflow/TemplateVars.rst b/doc/UsersGuide/CustomizingTheWorkflow/TemplateVars.rst similarity index 100% rename from doc/UsersGuide/source/CustomizingTheWorkflow/TemplateVars.rst rename to doc/UsersGuide/CustomizingTheWorkflow/TemplateVars.rst diff --git a/doc/UsersGuide/source/CustomizingTheWorkflow/index.rst b/doc/UsersGuide/CustomizingTheWorkflow/index.rst similarity index 100% rename from doc/UsersGuide/source/CustomizingTheWorkflow/index.rst rename to doc/UsersGuide/CustomizingTheWorkflow/index.rst diff --git a/doc/UsersGuide/source/Reference/FAQ.rst b/doc/UsersGuide/Reference/FAQ.rst similarity index 100% rename from doc/UsersGuide/source/Reference/FAQ.rst rename to doc/UsersGuide/Reference/FAQ.rst diff --git a/doc/UsersGuide/source/Reference/Glossary.rst b/doc/UsersGuide/Reference/Glossary.rst similarity index 98% rename from doc/UsersGuide/source/Reference/Glossary.rst rename to doc/UsersGuide/Reference/Glossary.rst index dc1f810306..90f9c8ab89 100644 --- a/doc/UsersGuide/source/Reference/Glossary.rst +++ b/doc/UsersGuide/Reference/Glossary.rst @@ -90,7 +90,7 @@ Glossary `Earth System Modeling Framework `__. 
The ESMF defines itself as “a suite of software tools for developing high-performance, multi-component Earth science modeling applications.”

   ex-scripts
-      Scripting layer (contained in ``ufs-srweather-app/scripts/``) that should be called by a :term:`J-job ` for each workflow componentto run a specific task or sub-task in the workflow. The different scripting layers are described in detail in the `NCO Implementation Standards document `__
+      Scripting layer (contained in ``ufs-srweather-app/scripts/``) that should be called by a :term:`J-job ` for each workflow component to run a specific task or sub-task in the workflow. The different scripting layers are described in detail in the :nco:`NCO Implementation Standards document `

   FV3
      The Finite-Volume Cubed-Sphere :term:`dynamical core` (dycore). Developed at NOAA's `Geophysical
@@ -133,7 +133,7 @@ Glossary
      Initial conditions

   J-jobs
-      Scripting layer (contained in ``ufs-srweather-app/jobs/``) that should be directly called for each workflow component (either on the command line or by the workflow manager) to run a specific task in the workflow. The different scripting layers are described in detail in the `NCO Implementation Standards document `__
+      Scripting layer (contained in ``ufs-srweather-app/jobs/``) that should be directly called for each workflow component (either on the command line or by the workflow manager) to run a specific task in the workflow. The different scripting layers are described in detail in the :nco:`NCO Implementation Standards document `

   JEDI
      The Joint Effort for Data assimilation Integration (`JEDI `__) is a unified and versatile data assimilation (DA) system for Earth System Prediction. It aims to enable efficient research and accelerated transition from research to operations by providing a framework that takes into account all components of the Earth system in a consistent manner. The JEDI software package can run on a variety of platforms and for a variety of purposes, and it is designed to readily accommodate new atmospheric and oceanic models and new observation systems. The `JEDI User's Guide `__ contains extensive information on the software.

diff --git a/doc/UsersGuide/source/Reference/RocotoInfo.rst b/doc/UsersGuide/Reference/RocotoInfo.rst
similarity index 100%
rename from doc/UsersGuide/source/Reference/RocotoInfo.rst
rename to doc/UsersGuide/Reference/RocotoInfo.rst
diff --git a/doc/UsersGuide/source/Reference/index.rst b/doc/UsersGuide/Reference/index.rst
similarity index 100%
rename from doc/UsersGuide/source/Reference/index.rst
rename to doc/UsersGuide/Reference/index.rst
diff --git a/doc/UsersGuide/source/SSHIntro.rst b/doc/UsersGuide/SSHIntro.rst
similarity index 100%
rename from doc/UsersGuide/source/SSHIntro.rst
rename to doc/UsersGuide/SSHIntro.rst
diff --git a/doc/UsersGuide/index.rst b/doc/UsersGuide/index.rst
new file mode 100644
index 0000000000..58c6fe6089
--- /dev/null
+++ b/doc/UsersGuide/index.rst
@@ -0,0 +1,10 @@
+User's Guide
+==============
+
+.. 
toctree:: + :maxdepth: 3 + + BackgroundInfo/index + BuildingRunningTesting/index + CustomizingTheWorkflow/index + Reference/index diff --git a/doc/UsersGuide/source/_static/theme_overrides.css b/doc/UsersGuide/source/_static/theme_overrides.css deleted file mode 100644 index a9672944ba..0000000000 --- a/doc/UsersGuide/source/_static/theme_overrides.css +++ /dev/null @@ -1,24 +0,0 @@ -/* override table width restrictions */ -@media screen and (min-width: 767px) { - - .wy-table-responsive table td { - /* !important prevents the common CSS stylesheets from overriding - this as on RTD they are loaded after this stylesheet */ - white-space: normal !important; - } - - .wy-nav-content { - max-width: 100% !important; - } - - /* .wy-table-responsive { */ - /* overflow: visible !important; */ - /* } */ - -} - -/* Darken navbar blue background for contrast with logo */ -.wy-side-nav-search, .wy-nav-top { - background: #2779B0; -} - diff --git a/doc/UsersGuide/source/index.rst b/doc/UsersGuide/source/index.rst deleted file mode 100644 index 8f975b7cd1..0000000000 --- a/doc/UsersGuide/source/index.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. UFS SR Weather App Users Guide, created by - sphinx-quickstart on Tue Feb 12 08:48:32 2019. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -UFS Short-Range Weather App User's Guide (|version|) -===================================================== - -.. toctree:: - :numbered: - :maxdepth: 3 - - BackgroundInfo/index - BuildingRunningTesting/index - CustomizingTheWorkflow/index - Reference/index diff --git a/doc/UsersGuide/source/_static/custom.css b/doc/_static/custom.css similarity index 100% rename from doc/UsersGuide/source/_static/custom.css rename to doc/_static/custom.css diff --git a/doc/_static/theme_overrides.css b/doc/_static/theme_overrides.css new file mode 100644 index 0000000000..a4e5cab82f --- /dev/null +++ b/doc/_static/theme_overrides.css @@ -0,0 +1,26 @@ +.wy-table-responsive table td { + /* !important prevents the common CSS stylesheets from overriding + this as on RTD they are loaded after this stylesheet */ + white-space: normal !important; +} + +.wy-nav-content { + max-width: 100% !important; +} + +.wy-table-responsive { + overflow: visible !important; +} + +/* Darken background for contrast with logo */ +.wy-side-nav-search, .wy-nav-top { + background: #2779B0; +} + +/* Darken navbar blue background for contrast with logo */ +.wy-side-nav-search, .wy-nav-top { + background: #2779B0; +} + + + diff --git a/doc/UsersGuide/source/_templates/.gitignore b/doc/_templates/.gitignore similarity index 100% rename from doc/UsersGuide/source/_templates/.gitignore rename to doc/_templates/.gitignore diff --git a/doc/UsersGuide/source/conf.py b/doc/conf.py similarity index 85% rename from doc/UsersGuide/source/conf.py rename to doc/conf.py index cc348f18da..6b0f461ba8 100644 --- a/doc/UsersGuide/source/conf.py +++ b/doc/conf.py @@ -14,7 +14,8 @@ # import os import sys -sys.path.insert(0, os.path.abspath('.')) +import sphinx +from sphinx.util import logging @@ -32,37 +33,10 @@ numfig = True -# Avoid a 403 Forbidden error when accessing certain links (e.g., noaa.gov) -user_agent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36" - -# Ignore working links that cause a linkcheck 403 error. 
-linkcheck_ignore = [r'https://www\.intel\.com/content/www/us/en/docs/cpp\-compiler/developer\-guide\-reference/2021\-10/thread\-affinity\-interface\.html', - r'https://www\.intel\.com/content/www/us/en/developer/tools/oneapi/hpc\-toolkit\-download\.html', - ] - -# Ignore anchor tags for SRW App data bucket. Shows Not Found even when they exist. -linkcheck_anchors_ignore = [r"current_srw_release_data/", - r"input_model_data/.*", - r"fix.*", - r"sample_cases/.*", - ] - -linkcheck_allowed_redirects = {r"https://github\.com/ufs-community/ufs-srweather-app/wiki/.*": r"https://raw\.githubusercontent\.com/wiki/ufs-community/ufs-srweather-app/.*", - r"https://github\.com/ufs-community/ufs-srweather-app/issues/new/choose": r"https://github\.com/login", - r"https://doi\.org/.*/zenodo\..*": r"https://zenodo\.org/records/.*", - r"https://doi\.org/.*": r"https://gmd\.copernicus\.org/.*", - r"https://rdhpcs\-common\-docs\.rdhpcs\.noaa\.gov/wiki/index\.php/Transferring\_Data": - r"https://sso\.noaa\.gov\:443/openam/SSORedirect/metaAlias/noaa\-online/idp\?SAMLRequest\=.*" - } # -- General configuration --------------------------------------------------- -# If your documentation needs a minimal Sphinx version, state it here. -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. +# Sphinx extension module names: extensions = [ 'sphinx_rtd_theme', 'sphinx.ext.autodoc', @@ -78,7 +52,6 @@ ] bibtex_bibfiles = ['references.bib'] -#bibtex_bibfiles = ['refs.bib'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -99,7 +72,8 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] +exclude_patterns = ['_build', + '.DS_Store',] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' @@ -117,6 +91,37 @@ .. |data| replace:: develop """ +# Linkcheck options + +# Avoid a 403 Forbidden error when accessing certain links (e.g., noaa.gov) +user_agent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36" + +# Ignore working links that cause a linkcheck 403 error. +linkcheck_ignore = [r'https://www\.intel\.com/content/www/us/en/docs/cpp\-compiler/developer\-guide\-reference/2021\-10/thread\-affinity\-interface\.html', + r'https://www\.intel\.com/content/www/us/en/developer/tools/oneapi/hpc\-toolkit\-download\.html', + #r'https://glossary.ametsoc.org/.*', + ] + +# Ignore anchor tags for SRW App data bucket. Shows Not Found even when they exist. 
+linkcheck_anchors_ignore = [r"current_srw_release_data/", + r"input_model_data/.*", + r"fix.*", + r"sample_cases/.*", + ] + +linkcheck_allowed_redirects = {r"https://github\.com/ufs-community/ufs-srweather-app/wiki/.*": + r"https://raw\.githubusercontent\.com/wiki/ufs-community/ufs-srweather-app/.*", + r"https://github\.com/ufs-community/ufs-srweather-app/issues/new/choose": + r"https://github\.com/login", + r"https://doi\.org/.*/zenodo\..*": r"https://zenodo\.org/records/.*", + r"https://doi\.org/.*": r"https://gmd\.copernicus\.org/.*", + r"https://rdhpcs\-common\-docs\.rdhpcs\.noaa\.gov/wiki/index\.php/Transferring\_Data": + r"https://sso\.noaa\.gov\:443/openam/SSORedirect/metaAlias/noaa\-online/idp\?SAMLRequest\=.*", + r"https://github\.com/ufs-community/ufs\-srweather\-app/issues/.*": + r"https://github\.com/login\?return\_to\=https.*", + } + + # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for @@ -124,6 +129,7 @@ # html_theme = 'sphinx_rtd_theme' html_theme_path = ["_themes", ] +html_logo= "https://github.com/ufs-community/ufs/wiki/images/ufs-epic-logo.png" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -132,7 +138,7 @@ # html_theme_options = {} html_theme_options = { "body_max_width": "none", - "navigation_depth": 6, + "navigation_depth": 8, } # Add any paths that contain custom static files (such as style sheets) here, @@ -191,7 +197,7 @@ def setup(app): # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'UFS-SRWeatherApp.tex', 'UFS Short-Range Weather App Users Guide', + (master_doc, 'UFS-SRWeatherApp.tex', 'UFS Short-Range Weather App Documentation', ' ', 'manual'), ] @@ -201,7 +207,7 @@ def setup(app): # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - (master_doc, 'UFS-SRWeatherApp', 'UFS Short-Range Weather App Users Guide', + (master_doc, 'UFS-SRWeatherApp', 'UFS Short-Range Weather App Documentation', [author], 1) ] @@ -253,16 +259,16 @@ def setup(app): 'ccpp-techdoc': ('https://ccpp-techdoc.readthedocs.io/en/ufs_srw_app_v2.2.0/', None), 'stochphys': ('https://stochastic-physics.readthedocs.io/en/latest/', None), 'srw_v2.2.0': ('https://ufs-srweather-app.readthedocs.io/en/release-public-v2.2.0/', None), - 'uw': ('https://uwtools.readthedocs.io/en/main', None), } # -- Options for extlinks extension --------------------------------------- extlinks_detect_hardcoded_links = True -extlinks = {'srw-wiki': ('https://github.com/ufs-community/ufs-srweather-app/wiki/%s','%s'), - } - -# -- Options for todo extension ---------------------------------------------- - -# If true, `todo` and `todoList` produce output, else they produce nothing. 
-todo_include_todos = True +extlinks = {'github-docs': ('https://docs.github.com/en/%s', '%s'), + 'nco': ('https://www.nco.ncep.noaa.gov/idsb/implementation_standards/%s', '%s'), + "rst": ("https://www.sphinx-doc.org/en/master/usage/restructuredtext/%s", "%s"), + "rtd": ("https://readthedocs.org/projects/ufs-srweather-app/%s", "%s"), + 'srw-repo': ('https://github.com/ufs-community/ufs-srweather-app/%s', '%s'), + 'srw-wiki': ('https://github.com/ufs-community/ufs-srweather-app/wiki/%s','%s'), + 'uw': ('https://uwtools.readthedocs.io/en/main/%s', '%s'), + } \ No newline at end of file diff --git a/doc/index.rst b/doc/index.rst new file mode 100644 index 0000000000..c8cf2b32fc --- /dev/null +++ b/doc/index.rst @@ -0,0 +1,9 @@ +UFS Short-Range Weather App Documentation (|version|) +===================================================== + +.. toctree:: + :numbered: + :maxdepth: 3 + + UsersGuide/index + ContribGuide/index diff --git a/doc/UsersGuide/make.bat b/doc/make.bat similarity index 100% rename from doc/UsersGuide/make.bat rename to doc/make.bat diff --git a/doc/UsersGuide/source/references.bib b/doc/references.bib similarity index 100% rename from doc/UsersGuide/source/references.bib rename to doc/references.bib diff --git a/doc/UsersGuide/requirements.in b/doc/requirements.in similarity index 100% rename from doc/UsersGuide/requirements.in rename to doc/requirements.in diff --git a/doc/UsersGuide/requirements.txt b/doc/requirements.txt similarity index 97% rename from doc/UsersGuide/requirements.txt rename to doc/requirements.txt index 60c67635ea..0671225d72 100644 --- a/doc/UsersGuide/requirements.txt +++ b/doc/requirements.txt @@ -12,7 +12,7 @@ certifi==2024.2.2 # via requests charset-normalizer==3.3.2 # via requests -docutils==0.19 +docutils==0.20.1 # via # pybtex-docutils # sphinx @@ -58,7 +58,7 @@ sphinx-rtd-theme==2.0.0 # via -r requirements.in sphinxcontrib-applehelp==1.0.8 # via sphinx -sphinxcontrib-bibtex==2.5.0 +sphinxcontrib-bibtex==2.6.2 # via -r requirements.in sphinxcontrib-devhelp==1.0.6 # via sphinx diff --git a/doc/UsersGuide/source/tables/SRW_NATLEV_table.csv b/doc/tables/SRW_NATLEV_table.csv similarity index 100% rename from doc/UsersGuide/source/tables/SRW_NATLEV_table.csv rename to doc/tables/SRW_NATLEV_table.csv diff --git a/doc/UsersGuide/source/tables/SRW_NATLEV_table.rst b/doc/tables/SRW_NATLEV_table.rst similarity index 100% rename from doc/UsersGuide/source/tables/SRW_NATLEV_table.rst rename to doc/tables/SRW_NATLEV_table.rst diff --git a/doc/UsersGuide/source/tables/SRW_PRSLEV_table.csv b/doc/tables/SRW_PRSLEV_table.csv similarity index 100% rename from doc/UsersGuide/source/tables/SRW_PRSLEV_table.csv rename to doc/tables/SRW_PRSLEV_table.csv diff --git a/doc/UsersGuide/source/tables/SRW_PRSLEV_table.rst b/doc/tables/SRW_PRSLEV_table.rst similarity index 100% rename from doc/UsersGuide/source/tables/SRW_PRSLEV_table.rst rename to doc/tables/SRW_PRSLEV_table.rst diff --git a/doc/UsersGuide/source/tables/Tests.csv b/doc/tables/Tests.csv similarity index 100% rename from doc/UsersGuide/source/tables/Tests.csv rename to doc/tables/Tests.csv diff --git a/doc/UsersGuide/source/tables/Tests.rst b/doc/tables/Tests.rst similarity index 100% rename from doc/UsersGuide/source/tables/Tests.rst rename to doc/tables/Tests.rst diff --git a/doc/tables/code-managers.csv b/doc/tables/code-managers.csv new file mode 100644 index 0000000000..ec665b0fa7 --- /dev/null +++ b/doc/tables/code-managers.csv @@ -0,0 +1,21 @@ +Affiliation;Code Manager;Areas of Expertise +EPIC;**Michael 
Lueken (@MichaelLueken)**;Lead SRW App code manager +GSL;Daniel Abdi (@danielabdi-noaa);Workflow generation, testing RRFS on the cloud, environment modules +GSL;Jeff Beck (@JeffBeck-NOAA);SRW App configuration/workflow, code management, meteorological evaluation +EMC;Ben Blake (@BenjaminBlake-NOAA);Output visualization, Rocoto +EMC;Brian Curtis (@BrianCurtis-NOAA);Operational air quality modeling (Online-CMAQ), code management +GSL;Christopher Harrop (@christopherwharrop-noaa);Rocoto, code management, and testing +GSL;Christina Holt (@christinaholtNOAA);Workflow, conda environment support, testing, and code management +EPIC;Chan-Hoo Jeon (@chan-hoo);Air quality modeling (Online-CMAQ), NCO Implementation Standards, Workflow +EPIC;Jong Kim (@jkbk2004);UFS Weather Model configuration, forecast sensitivity analysis, data assimilation +NCAR;Mike Kavulich (@mkavulich);Workflow, CCPP/physics, code management, WE2E testing, verification +GSL;Gerard Ketefian (@gsketefian);Verification/METplus tasks, Jinja templates, and workflow scripts +NCAR;Will Mayfield (@willmayfield);Verification/METplus tasks +GSL;Linlin Pan (@panll);Workflow, CCPP/physics, and verification +EPIC;Natalie Perlin (@natalie-perlin);Generic Linux/Mac installations, hpc-stack/spack-stack +EPIC;Gillian Petro (@gspetro-NOAA);Documentation, User Support +EPIC;Mark Potts (@mark-a-potts);HPC systems +EPIC;Edward Snyder (@EdwardSnyder-NOAA);WE2E testing, input data +GLERL;David Wright (@dmwright526);FVCOM integration, output visualization, preprocessing tasks +EPIC;Ratko Vasic (@RatkoVasic-NOAA);Workflow, testing, and spack-stack maintenance +NSSL;Yunheng Wang (@ywangwof);HPC systems, code management, and regional workflow (especially on Stampede, Jet and NSSL computers) diff --git a/doc/UsersGuide/source/tables/fix_file_list.rst b/doc/tables/fix_file_list.rst similarity index 100% rename from doc/UsersGuide/source/tables/fix_file_list.rst rename to doc/tables/fix_file_list.rst From d4043cef4ff2550d1ad2a757f58ded08e8b23602 Mon Sep 17 00:00:00 2001 From: "Chan-Hoo.Jeon-NOAA" <60152248+chan-hoo@users.noreply.github.com> Date: Tue, 5 Mar 2024 09:23:27 -0500 Subject: [PATCH 05/42] [develop] Fix sample script and WE2E test for AQM (#1040) * Fixes the failure on the sample script ush/config.aqm.community.yaml. * Fixes the failure on the WE2E test test/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml for AQM. 
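For context on the cycle settings changed below, the following minimal sketch (illustrative only, not part of this patch) shows the usual interpretation of ``DATE_FIRST_CYCL``/``DATE_LAST_CYCL`` as YYYYMMDDHH stamps, with ``INCR_CYCL_FREQ`` giving the spacing between cycles in hours:

.. code-block:: python

   # Minimal sketch: enumerate cycle start times under the interpretation
   # that DATE_FIRST_CYCL/DATE_LAST_CYCL are YYYYMMDDHH stamps and
   # INCR_CYCL_FREQ is the number of hours between cycles.
   from datetime import datetime, timedelta

   def cycle_starts(first, last, incr_hrs):
       t = datetime.strptime(first, "%Y%m%d%H")
       end = datetime.strptime(last, "%Y%m%d%H")
       starts = []
       while t <= end:
           starts.append(t.strftime("%Y%m%d%H"))
           t += timedelta(hours=incr_hrs)
       return starts

   # The updated configs cycle daily, giving two cycles of a 24-h forecast:
   print(cycle_starts("2023111000", "2023111100", 24))
   # ['2023111000', '2023111100']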
--- .gitignore | 1 + modulefiles/tasks/hera/aqm_lbcs.local.lua | 2 +- scripts/exregional_fire_emission.sh | 4 ++-- tests/WE2E/run_WE2E_tests.py | 4 ---- ...onfig.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml | 15 ++++++--------- ush/config.aqm.community.yaml | 17 +++++++---------- ush/machine/hera.yaml | 4 ++-- 7 files changed, 19 insertions(+), 28 deletions(-) diff --git a/.gitignore b/.gitignore index 8566703a22..ad778d0bc1 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ bin/ +conda/ exec/ build/ fix/ diff --git a/modulefiles/tasks/hera/aqm_lbcs.local.lua b/modulefiles/tasks/hera/aqm_lbcs.local.lua index 5a7b0cece6..0e7132d749 100644 --- a/modulefiles/tasks/hera/aqm_lbcs.local.lua +++ b/modulefiles/tasks/hera/aqm_lbcs.local.lua @@ -1,2 +1,2 @@ +load("python_srw_cmaq") load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("miniconda_regional_workflow_cmaq") diff --git a/scripts/exregional_fire_emission.sh b/scripts/exregional_fire_emission.sh index e1d2101477..ef1b4e291d 100755 --- a/scripts/exregional_fire_emission.sh +++ b/scripts/exregional_fire_emission.sh @@ -70,8 +70,8 @@ hh_mh1=${CDATE_mh1:8:2} aqm_fire_file_fn="${AQM_FIRE_FILE_PREFIX}_${yyyymmdd}_t${hh}z${AQM_FIRE_FILE_SUFFIX}" # Check if the fire file exists in the designated directory -if [ -e "${DCOMINfire}/${yyyymmdd}/${aqm_fire_file_fn}" ]; then - cp_vrfy "${DCOMINfire}/${yyyymmdd}/${aqm_fire_file_fn}" "${FIRE_EMISSION_STAGING_DIR}" +if [ -e "${DCOMINfire}/${aqm_fire_file_fn}" ]; then + cp_vrfy "${DCOMINfire}/${aqm_fire_file_fn}" "${FIRE_EMISSION_STAGING_DIR}" else # Copy raw data for ihr in {0..23}; do diff --git a/tests/WE2E/run_WE2E_tests.py b/tests/WE2E/run_WE2E_tests.py index 5c720e7d93..5e1109c1ea 100755 --- a/tests/WE2E/run_WE2E_tests.py +++ b/tests/WE2E/run_WE2E_tests.py @@ -224,10 +224,6 @@ def run_we2e_tests(homedir, args) -> None: logging.debug(f'Setting {obvar} = {mach_path} from machine file') test_cfg['platform'][obvar] = mach_path - if 'cpl_aqm_parm' in test_cfg: - test_aqm_input_basedir = machine_defaults['platform']['TEST_AQM_INPUT_BASEDIR'] - test_cfg['cpl_aqm_parm']['DCOMINfire_default'] = f"{test_aqm_input_basedir}/RAVE_fire" - if args.compiler == "gnu": # 2D decomposition doesn't work with GNU compilers. 
Deactivate 2D decomposition for GNU if 'task_run_post' in test_cfg: diff --git a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml index 789c5e9674..f244a527cf 100644 --- a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml +++ b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml @@ -5,13 +5,10 @@ user: workflow: PREDEF_GRID_NAME: AQM_NA_13km CCPP_PHYS_SUITE: FV3_GFS_v16 - DATE_FIRST_CYCL: '2023021700' - DATE_LAST_CYCL: '2023021706' - INCR_CYCL_FREQ: 6 - FCST_LEN_HRS: -1 - FCST_LEN_CYCL: - - 6 - - 12 + DATE_FIRST_CYCL: '2023111000' + DATE_LAST_CYCL: '2023111100' + INCR_CYCL_FREQ: 24 + FCST_LEN_HRS: 24 PREEXISTING_DIR_METHOD: rename DIAG_TABLE_TMPL_FN: diag_table_aqm.FV3_GFS_v16 FIELD_TABLE_TMPL_FN: field_table_aqm.FV3_GFS_v16 @@ -24,7 +21,7 @@ rocoto: task_aqm_ics_ext: metatask_run_ensemble: task_run_fcst_mem#mem#: - walltime: 02:00:00 + walltime: 01:00:00 task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: netcdf @@ -41,7 +38,7 @@ task_run_fcst: LAYOUT_X: 50 LAYOUT_Y: 34 BLOCKSIZE: 16 - RESTART_INTERVAL: 6 + RESTART_INTERVAL: 12 24 QUILTING: true PRINT_ESMF: false DO_FCST_RESTART: false diff --git a/ush/config.aqm.community.yaml b/ush/config.aqm.community.yaml index 30e391edf2..7586719f2e 100644 --- a/ush/config.aqm.community.yaml +++ b/ush/config.aqm.community.yaml @@ -10,16 +10,13 @@ workflow: EXPT_SUBDIR: aqm_community_aqmna13 PREDEF_GRID_NAME: AQM_NA_13km CCPP_PHYS_SUITE: FV3_GFS_v16 - DATE_FIRST_CYCL: '2023021700' - DATE_LAST_CYCL: '2023021706' - INCR_CYCL_FREQ: 6 - FCST_LEN_HRS: -1 - FCST_LEN_CYCL: - - 6 - - 12 + DATE_FIRST_CYCL: '2023111000' + DATE_LAST_CYCL: '2023111100' + INCR_CYCL_FREQ: 24 + FCST_LEN_HRS: 24 PREEXISTING_DIR_METHOD: rename VERBOSE: true - DEBUG: true + DEBUG: false COMPILER: intel DIAG_TABLE_TMPL_FN: diag_table_aqm.FV3_GFS_v16 FIELD_TABLE_TMPL_FN: field_table_aqm.FV3_GFS_v16 @@ -32,7 +29,7 @@ rocoto: task_aqm_ics_ext: metatask_run_ensemble: task_run_fcst_mem#mem#: - walltime: 02:00:00 + walltime: 01:00:00 task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: netcdf @@ -47,7 +44,7 @@ task_run_fcst: LAYOUT_X: 50 LAYOUT_Y: 34 BLOCKSIZE: 16 - RESTART_INTERVAL: 6 + RESTART_INTERVAL: 12 24 QUILTING: true PRINT_ESMF: false DO_FCST_RESTART: false diff --git a/ush/machine/hera.yaml b/ush/machine/hera.yaml index 8d751ae891..0aadaa6d8b 100644 --- a/ush/machine/hera.yaml +++ b/ush/machine/hera.yaml @@ -38,8 +38,8 @@ platform: EXTRN_MDL_DATA_STORES: hpss aws nomads cpl_aqm_parm: - AQM_CONFIG_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/aqm/epa/data - DCOMINbio_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/aqm/bio + AQM_CONFIG_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/fix/aqm/epa/data + DCOMINbio_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/fix/aqm/bio DCOMINdust_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/FENGSHA DCOMINcanopy_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/canopy DCOMINfire_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/RAVE_fire From 86b8cd417e82a882b88e5bfe664870d926727bee Mon Sep 17 00:00:00 2001 From: EdwardSnyder-NOAA <96196752+EdwardSnyder-NOAA@users.noreply.github.com> Date: Fri, 8 Mar 2024 08:11:15 -0600 Subject: [PATCH 06/42] [develop] Add integration test job (#1042) This update adds a test job to the workflow. It was originally written with pytest but because of some file naming issues, the python package unittest was used instead. 
The test checks for the existence of netcdf files from the weather model. The necessary scripts were added or modified to incorporate the integration job into the workflow. A wrapper script was also added. --------- Co-authored-by: Parallel Works app-run user Co-authored-by: michael.lueken --- .../BuildingRunningTesting/RunSRW.rst | 57 ++++---- jobs/JREGIONAL_INTEGRATION_TEST | 88 ++++++++++++ parm/wflow/default_workflow.yaml | 2 +- parm/wflow/test.yaml | 42 ++++++ scripts/exregional_integration_test.py | 134 ++++++++++++++++++ ...fig.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml | 2 +- ...g.custom_ESGgrid_Great_Lakes_snow_8km.yaml | 2 +- ...FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml | 2 +- ...FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml | 2 +- ...FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml | 2 +- ...GFS_lbcs_FV3GFS_suite_GFS_v17_p8_plot.yaml | 2 +- ...US_25km_ics_FV3GFS_lbcs_RAP_suite_RAP.yaml | 2 +- ...S_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml | 2 +- ..._FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml | 2 +- ..._ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml | 2 +- ...d_3km_ics_HRRR_lbcs_RAP_suite_WoFS_v0.yaml | 3 + ...s_RAP_lbcs_RAP_suite_RRFS_v1beta_plot.yaml | 2 +- ..._ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml | 2 +- ..._lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml | 2 +- ...FS_suite_GFS_v15_thompson_mynn_lam3km.yaml | 2 +- ...act_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml | 2 +- ...g.MET_ensemble_verification_winter_wx.yaml | 2 +- .../config.deactivate_tasks.yaml | 1 + .../wflow_features/config.nco_ensemble.yaml | 2 +- ush/wrappers/run_integration_test.sh | 14 ++ 25 files changed, 332 insertions(+), 43 deletions(-) create mode 100755 jobs/JREGIONAL_INTEGRATION_TEST create mode 100644 parm/wflow/test.yaml create mode 100755 scripts/exregional_integration_test.py create mode 100755 ush/wrappers/run_integration_test.sh diff --git a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst index 9546471310..9d42aaf0dc 100644 --- a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst +++ b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst @@ -791,6 +791,8 @@ The ``FV3LAM_wflow.xml`` file runs the specific j-job scripts (``jobs/JREGIONAL_ - Run the forecast model (UFS Weather Model) * - run_post_* - Run the post-processing tool (UPP) + * - integration_test_* + - Run integration test In addition to the baseline tasks described in :numref:`Table %s ` above, users may choose to run a variety of optional tasks, including plotting and verification tasks. @@ -983,6 +985,7 @@ The workflow run is complete when all tasks have "SUCCEEDED". If everything goes 201906151800 run_post_mem000_f001 4953245 SUCCEEDED 0 1 4.0 ... 201906151800 run_post_mem000_f012 4953381 SUCCEEDED 0 1 7.0 + 201906151800 integration_test_mem000 4953237 SUCCEEDED 0 1 7.0 If users choose to run METplus verification tasks as part of their experiment, the output above will include additional lines after ``run_post_mem000_f012``. 
The output will resemble the following but may be significantly longer when using ensemble verification: @@ -1058,6 +1061,7 @@ This will output the last 40 lines of the log file, which lists the status of th 201906151800 run_post_mem000_f004 - - - - - 201906151800 run_post_mem000_f005 - - - - - 201906151800 run_post_mem000_f006 - - - - - + 201906151800 integration_test_mem000 Summary of workflow status: ~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1168,6 +1172,7 @@ The SRW App workflow can be run using standalone shell scripts in cases where th ./run_make_lbcs.sh ./run_fcst.sh ./run_post.sh + ./run_integration_test.sh Each task should finish with error code 0. For example: @@ -1184,31 +1189,33 @@ Check the batch script output file in your experiment directory for a “SUCCESS processors and wall clock time is a good starting point for NOAA HPC systems when running a 48-h forecast on the 25-km CONUS domain. For a brief description of tasks, see :numref:`Table %s `. - +------------+------------------------+----------------+----------------------------+ - | **Stage/** | **Task Run Script** | **Number of** | **Wall Clock Time (H:mm)** | - | | | **Processors** | | - +============+========================+================+============================+ - | 1 | run_get_ics.sh | 1 | 0:20 (depends on HPSS vs | - | | | | FTP vs staged-on-disk) | - +------------+------------------------+----------------+----------------------------+ - | 1 | run_get_lbcs.sh | 1 | 0:20 (depends on HPSS vs | - | | | | FTP vs staged-on-disk) | - +------------+------------------------+----------------+----------------------------+ - | 1 | run_make_grid.sh | 24 | 0:20 | - +------------+------------------------+----------------+----------------------------+ - | 2 | run_make_orog.sh | 24 | 0:20 | - +------------+------------------------+----------------+----------------------------+ - | 3 | run_make_sfc_climo.sh | 48 | 0:20 | - +------------+------------------------+----------------+----------------------------+ - | 4 | run_make_ics.sh | 48 | 0:30 | - +------------+------------------------+----------------+----------------------------+ - | 4 | run_make_lbcs.sh | 48 | 0:30 | - +------------+------------------------+----------------+----------------------------+ - | 5 | run_fcst.sh | 48 | 0:30 | - +------------+------------------------+----------------+----------------------------+ - | 6 | run_post.sh | 48 | 0:25 (2 min per output | - | | | | forecast hour) | - +------------+------------------------+----------------+----------------------------+ + +------------+--------------------------+----------------+----------------------------+ + | **Stage/** | **Task Run Script** | **Number of** | **Wall Clock Time (H:mm)** | + | | | **Processors** | | + +============+==========================+================+============================+ + | 1 | run_get_ics.sh | 1 | 0:20 (depends on HPSS vs | + | | | | FTP vs staged-on-disk) | + +------------+--------------------------+----------------+----------------------------+ + | 1 | run_get_lbcs.sh | 1 | 0:20 (depends on HPSS vs | + | | | | FTP vs staged-on-disk) | + +------------+--------------------------+----------------+----------------------------+ + | 1 | run_make_grid.sh | 24 | 0:20 | + +------------+--------------------------+----------------+----------------------------+ + | 2 | run_make_orog.sh | 24 | 0:20 | + +------------+--------------------------+----------------+----------------------------+ + | 3 | run_make_sfc_climo.sh | 48 | 0:20 | + 
+------------+--------------------------+----------------+----------------------------+
| 4          | run_make_ics.sh          | 48             | 0:30                       |
+------------+--------------------------+----------------+----------------------------+
| 4          | run_make_lbcs.sh         | 48             | 0:30                       |
+------------+--------------------------+----------------+----------------------------+
| 5          | run_fcst.sh              | 48             | 0:30                       |
+------------+--------------------------+----------------+----------------------------+
| 6          | run_post.sh              | 48             | 0:25 (2 min per output     |
|            |                          |                | forecast hour)             |
+------------+--------------------------+----------------+----------------------------+
| 7          | run_integration_test.sh  | 1              | 0:05                       |
+------------+--------------------------+----------------+----------------------------+

Users can access log files for specific tasks in the ``$EXPTDIR/log`` directory. To see how the experiment is progressing, users can also check the end of the ``log.launch_FV3LAM_wflow`` file from the command line:
diff --git a/jobs/JREGIONAL_INTEGRATION_TEST b/jobs/JREGIONAL_INTEGRATION_TEST
new file mode 100755
index 0000000000..cbb93e86cf
--- /dev/null
+++ b/jobs/JREGIONAL_INTEGRATION_TEST
@@ -0,0 +1,88 @@
+#!/bin/bash
+
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+. $USHdir/source_util_funcs.sh
+source_config_for_task "task_integration_test|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP}
+. $USHdir/job_preamble.sh
+#
+#-----------------------------------------------------------------------
+#
+# Save current shell options (in a global array). Then set new options
+# for this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the integration test task
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Set the forecast directory.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${RUN_ENVIR}" != "nco" ]; then
+  export fcst_dir="${FCST_DIR}${SLASH_ENSMEM_SUBDIR}"
+else
+  export fcst_dir="${FCST_DIR}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job and pass to it the necessary variables. 
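+# The ex-script checks that the expected UFS Weather Model netCDF output
+# files exist in the forecast directory for each forecast hour.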
+#
+#-----------------------------------------------------------------------
+#
+$SCRIPTSdir/exregional_integration_test.py \
+  --fcst_dir=$fcst_dir \
+  --fcst_len=${FCST_LEN_HRS} || \
+print_err_msg_exit "\
+Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
+#
+#-----------------------------------------------------------------------
+#
+# Run job postamble.
+#
+#-----------------------------------------------------------------------
+#
+job_postamble
+#
+#-----------------------------------------------------------------------
+#
+# Restore the shell options saved at the beginning of this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ restore_shell_opts; } > /dev/null 2>&1
+
diff --git a/parm/wflow/default_workflow.yaml b/parm/wflow/default_workflow.yaml
index 2451781e6b..b70ad8dbb2 100644
--- a/parm/wflow/default_workflow.yaml
+++ b/parm/wflow/default_workflow.yaml
@@ -45,4 +45,4 @@ rocoto:
       - !startstopfreq ['{%- if workflow.DATE_FIRST_CYCL != workflow.DATE_LAST_CYCL %}{{ [workflow.DATE_FIRST_CYCL[0:8], "{:02d}".format(workflow.INCR_CYCL_FREQ)]|join }}{%- else %}{{workflow.DATE_FIRST_CYCL}}{%- endif %}', '{{workflow.DATE_LAST_CYCL}}', '{{workflow.INCR_CYCL_FREQ}}']
     log: !cycstr '&LOGDIR;/FV3LAM_wflow.{% if user.RUN_ENVIR == "nco" %}{{ workflow.WORKFLOW_ID + "." }}{% endif %}log'
   tasks:
-    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}'
+    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}'
diff --git a/parm/wflow/test.yaml b/parm/wflow/test.yaml
new file mode 100644
index 0000000000..716665b228
--- /dev/null
+++ b/parm/wflow/test.yaml
@@ -0,0 +1,42 @@
+# Settings for the integration test task, which verifies that the expected
+# forecast output files were produced.
+
+default_task_test: &default_task
+  account: '&ACCOUNT;'
+  attrs:
+    cycledefs: forecast
+    maxtries: '2'
+  envars: &default_envars
+    GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;'
+    USHdir: '&USHdir;'
+    FCST_DIR: !cycstr '&FCST_DIR;'
+    PDY: !cycstr "@Y@m@d"
+    cyc: !cycstr "@H"
+    subcyc: !cycstr "@M"
+    LOGDIR: !cycstr "&LOGDIR;"
+    SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;'
+    ENSMEM_INDX: '#mem#'
+  native: '{{ platform.SCHED_NATIVE_CMD }}'
+  nnodes: 1
+  nodes: '{{ nnodes }}:ppn={{ ppn }}'
+  partition: '{% if platform.get("PARTITION_DEFAULT") %}&PARTITION_DEFAULT;{% else %}None{% endif %}'
+  ppn: 24
+  queue: '&QUEUE_DEFAULT;'
+  walltime: 00:05:00
+
+metatask_integration_test:
+  var:
+    mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}'
+  task_integration_test_mem#mem#:
+    <<: *default_task
+    command: '&LOAD_MODULES_RUN_TASK_FP; "integration_test" "&JOBSdir;/JREGIONAL_INTEGRATION_TEST"'
+    join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;'
+    dependency:
+      and_run_fcst:
+        taskvalid:
+          attrs:
+            task: run_fcst_mem#mem#
+        taskdep:
+          attrs:
+            task: run_fcst_mem#mem#
+
diff --git a/scripts/exregional_integration_test.py b/scripts/exregional_integration_test.py
new file mode 100755
index 0000000000..f0ac3d9af6
--- /dev/null
+++ b/scripts/exregional_integration_test.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python3
+
+################################################################################
+#### Python Script Documentation Block
+#
+# Script name: exregional_integration_test.py
+# Script description: Ensures the correct number of netcdf files are generated
+#                     for each experiment
+#
+# Author: Eddie Snyder Org: NOAA EPIC Date: 2024-02-05
+#
+# Instructions: 1. Pass the appropriate info for the required arguments:
+#                  --fcst_dir=/path/to/forecast/files
+#                  --fcst_len=
+#               2. Run script with arguments
+#
+# Notes/future work: - Currently SRW App only accepts netcdf as the UFS WM
+#                      output file format. If that changes, then additional
+#                      logic is needed to address the other file formats.
+#                    - SRW App doesn't have a variable that updates the
+#                      forecast increment. The UFS WM does with the
+#                      output_fh variable, which can be found in the
+#                      model_configure file. If it becomes available with
+#                      the SRW App, then logic is needed to account for the
+#                      forecast increment variable. 
+#
+################################################################################
+
+# -------------Import modules --------------------------#
+import os
+import sys
+import logging
+import argparse
+import unittest
+
+# --------------Define some functions ------------------#
+
+
+class TestExptFiles(unittest.TestCase):
+    fcst_dir = ''
+    filename_list = ''
+
+    def test_fcst_files(self):
+
+        for filename in self.filename_list:
+            filename_fp = "{0}/{1}".format(self.fcst_dir, filename)
+
+            logging.info("Checking existence of: {0}".format(filename_fp))
+            err_msg = "Missing file: {0}".format(filename_fp)
+            self.assertTrue(os.path.exists(filename_fp), err_msg)
+
+def setup_logging(debug=False):
+
+    """Calls initialization functions for logging package, and sets the
+    user-defined level for logging in the script."""
+
+    level = logging.INFO
+    if debug:
+        level = logging.DEBUG
+
+    logging.basicConfig(format="%(levelname)s: %(message)s ", level=level)
+    if debug:
+        logging.info("Logging level set to DEBUG")
+
+
+# -------------Start of script -------------------------#
+if __name__ == "__main__":
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--fcst_dir",
+        help="Directory of forecast files.",
+        required=True,
+    )
+    parser.add_argument(
+        "--fcst_len",
+        help="Forecast length.",
+        required=True,
+    )
+    parser.add_argument(
+        "--fcst_inc",
+        default="1",
+        help="Increment of forecast in hours.",
+        required=False,
+    )
+    parser.add_argument(
+        "--debug",
+        action="store_true",
+        help="Print debug messages.",
+        required=False,
+    )
+    parser.add_argument('unittest_args', nargs='*')
+    args = parser.parse_args()
+    sys.argv[1:] = args.unittest_args
+
+    fcst_dir = str(args.fcst_dir)
+    fcst_len = int(args.fcst_len)
+    fcst_inc = int(args.fcst_inc)
+
+    # Start logger
+    setup_logging()
+
+    # Check if model_configure exists
+    model_configure_fp = "{0}/model_configure".format(fcst_dir)
+
+    if not os.path.isfile(model_configure_fp):
+        logging.error("Experiment's model_configure file is missing! 
Exiting!") + sys.exit(1) + + # Loop through model_configure file to find the netcdf base names + f = open(model_configure_fp, 'r') + + for line in f: + if line.startswith("filename_base"): + filename_base_1 = line.split("'")[1] + filename_base_2 = line.split("'")[3] + break + f.close() + + # Create list of expected filenames from the experiment + fcst_len = fcst_len + 1 + filename_list = [] + + for x in range(0, fcst_len, fcst_inc): + fhour = str(x).zfill(3) + filename_1 = "{0}f{1}.nc".format(filename_base_1, fhour) + filename_2 = "{0}f{1}.nc".format(filename_base_2, fhour) + filename_list.append(filename_1) + filename_list.append(filename_2) + + # Call unittest class + TestExptFiles.fcst_dir = fcst_dir + TestExptFiles.filename_list = filename_list + unittest.main() diff --git a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml index f244a527cf..1587fadcc1 100644 --- a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml +++ b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml @@ -17,7 +17,7 @@ nco: NET_default: aqm rocoto: tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}' task_aqm_ics_ext: metatask_run_ensemble: task_run_fcst_mem#mem#: diff --git a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Great_Lakes_snow_8km.yaml b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Great_Lakes_snow_8km.yaml index 981c736239..d773c632e2 100644 --- a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Great_Lakes_snow_8km.yaml +++ b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Great_Lakes_snow_8km.yaml @@ -15,7 +15,7 @@ workflow: PREEXISTING_DIR_METHOD: rename rocoto: tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/test.yaml"]|include }}' metatask_run_ensemble: task_run_fcst_mem#mem#: walltime: 01:00:00 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml index 7bc578b203..95b57b0aa5 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml @@ -14,7 +14,7 @@ workflow: PREEXISTING_DIR_METHOD: rename rocoto: tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml", "parm/wflow/test.yaml"]|include }}' task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS USE_USER_STAGED_EXTRN_FILES: true diff --git 
a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml index 6c00cb9aab..3ce1146840 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml @@ -14,7 +14,7 @@ workflow: PREEXISTING_DIR_METHOD: rename rocoto: tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml", "parm/wflow/test.yaml"]|include }}' task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS USE_USER_STAGED_EXTRN_FILES: true diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml index b4b29df9fa..b44dd3eec5 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml @@ -14,7 +14,7 @@ workflow: PREEXISTING_DIR_METHOD: rename rocoto: tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml", "parm/wflow/test.yaml"]|include }}' task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS USE_USER_STAGED_EXTRN_FILES: true diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v17_p8_plot.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v17_p8_plot.yaml index fdf9d3333e..a6e7dd9008 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v17_p8_plot.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v17_p8_plot.yaml @@ -15,7 +15,7 @@ workflow: PREEXISTING_DIR_METHOD: rename rocoto: tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml", "parm/wflow/test.yaml"]|include }}' task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS USE_USER_STAGED_EXTRN_FILES: true diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP.yaml index 1e6c5ea724..4ef328a5db 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP.yaml +++ 
b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP.yaml @@ -17,7 +17,7 @@ workflow: PREEXISTING_DIR_METHOD: rename rocoto: tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml","parm/wflow/test.yaml"]|include }}' metatask_run_ensemble: task_run_fcst_mem#mem#: walltime: 01:00:00 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml index 23ad640390..933042c82f 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml @@ -16,7 +16,7 @@ workflow: PREEXISTING_DIR_METHOD: rename rocoto: tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml", "parm/wflow/test.yaml"]|include }}' metatask_run_ensemble: task_run_fcst_mem#mem#: walltime: 01:00:00 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_NA_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_NA_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml index 0928efe3de..ee06009cad 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_NA_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_NA_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml @@ -33,7 +33,7 @@ task_run_prdgen: DO_PARALLEL_PRDGEN: true rocoto: tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/prdgen.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/prdgen.yaml", "parm/wflow/test.yaml"]|include }}' task_make_orog: walltime: 01:00:00 metatask_run_ensemble: diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml index 1128f2a73c..275500e692 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml @@ -16,7 +16,7 @@ workflow: PREEXISTING_DIR_METHOD: rename rocoto: tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}' 
+ taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/test.yaml"]|include }}' metatask_run_ensemble: task_run_fcst_mem#mem#: walltime: 01:00:00 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_RAP_suite_WoFS_v0.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_RAP_suite_WoFS_v0.yaml index 76ded515af..e8a56e9e1e 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_RAP_suite_WoFS_v0.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_RAP_suite_WoFS_v0.yaml @@ -12,6 +12,9 @@ workflow: DATE_LAST_CYCL: '2020081000' FCST_LEN_HRS: 6 PREEXISTING_DIR_METHOD: rename +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}' task_get_extrn_ics: EXTRN_MDL_NAME_ICS: HRRR USE_USER_STAGED_EXTRN_FILES: true diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot.yaml index 6c896151b1..a6dc4ce9b0 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot.yaml @@ -14,7 +14,7 @@ workflow: PREEXISTING_DIR_METHOD: rename rocoto: tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml", "parm/wflow/test.yaml"]|include }}' task_get_extrn_ics: EXTRN_MDL_NAME_ICS: RAP EXTRN_MDL_ICS_OFFSET_HRS: 6 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml index 0c129be5b6..8fc88c9b6a 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml @@ -14,7 +14,7 @@ workflow: PREEXISTING_DIR_METHOD: rename rocoto: tasks: - taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}' task_get_extrn_ics: USE_USER_STAGED_EXTRN_FILES: true EXTRN_MDL_NAME_ICS: FV3GFS diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml index 79af5461e3..76c9656686 100644 --- 
a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml @@ -14,7 +14,7 @@ workflow: PREEXISTING_DIR_METHOD: rename rocoto: tasks: - taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}' task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS EXTRN_MDL_ICS_OFFSET_HRS: 6 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml index f90b17a95e..9a381857ed 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml @@ -14,7 +14,7 @@ workflow: PREEXISTING_DIR_METHOD: rename rocoto: tasks: - taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}' task_get_extrn_ics: USE_USER_STAGED_EXTRN_FILES: true EXTRN_MDL_NAME_ICS: FV3GFS diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml index be68e9d45e..0755e7fc4d 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml @@ -14,7 +14,7 @@ workflow: PREEXISTING_DIR_METHOD: rename rocoto: tasks: - taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}' task_get_extrn_ics: EXTRN_MDL_NAME_ICS: HRRR USE_USER_STAGED_EXTRN_FILES: true diff --git a/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_winter_wx.yaml b/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_winter_wx.yaml index dd3de4e472..85a515f293 100644 --- a/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_winter_wx.yaml +++ b/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_winter_wx.yaml @@ -14,7 +14,7 @@ workflow: PREEXISTING_DIR_METHOD: rename rocoto: tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml", "parm/wflow/test.yaml"]|include }}' metatask_run_ensemble: task_run_fcst_mem#mem#: walltime: 01:00:00 diff --git 
a/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.yaml b/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.yaml index 53d10f002a..91114f5b00 100644 --- a/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.yaml +++ b/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.yaml @@ -31,3 +31,4 @@ rocoto: task_get_extrn_lbcs: metatask_run_ensemble: metatask_run_ens_post: + metatask_integration_test: diff --git a/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml b/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml index 739b6bb3c5..c060cb7f9f 100644 --- a/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml +++ b/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml @@ -31,4 +31,4 @@ global: NUM_ENS_MEMBERS: 2 rocoto: tasks: - taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}' diff --git a/ush/wrappers/run_integration_test.sh b/ush/wrappers/run_integration_test.sh new file mode 100755 index 0000000000..6ce6afb8ed --- /dev/null +++ b/ush/wrappers/run_integration_test.sh @@ -0,0 +1,14 @@ +#!/bin/sh +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +set -xa +source ${GLOBAL_VAR_DEFNS_FP} +export CDATE=${DATE_FIRST_CYCL} +export CYCLE_DIR=${EXPTDIR}/${CDATE} +export cyc=${DATE_FIRST_CYCL:8:2} +export PDY=${DATE_FIRST_CYCL:0:8} +export SLASH_ENSMEM_SUBDIR="" +export ENSMEM_INDX="" +export FCST_DIR=${EXPTDIR}/$PDY$cyc + +${JOBSdir}/JREGIONAL_INTEGRATION_TEST + From 6e6a27f1a43a423a13657c71d4ff382ebdb9286e Mon Sep 17 00:00:00 2001 From: RatkoVasic-NOAA <37597874+RatkoVasic-NOAA@users.noreply.github.com> Date: Wed, 13 Mar 2024 10:11:21 -0400 Subject: [PATCH 07/42] [develop] Jet switch from CentOS to Rocky (#1045) Jet has migrated from CentOS to Rocky8 following the system maintenance on 03/12/2024. This work sets the updated Rocky8 spack-stack as default in the build_jet_intel.lua modulefile and modifies the Jet machine file to use PARTITION_FCST: xjet. 
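For anyone rebuilding the App on Jet after the migration, a clean rebuild is the safest path so that executables link against the Rocky8 spack-stack rather than the old CentOS stack. A minimal sketch of that sequence, mirroring the build steps the App's CI scripts already use (run from the top of a checkout; only the platform name is Jet-specific):

    source etc/lmod-setup.sh jet
    module use modulefiles
    module load build_jet_intel
    cd tests && ./build.sh jet intel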
--------- Co-authored-by: michael.lueken --- modulefiles/build_jet_intel.lua | 3 +-- ush/machine/jet.yaml | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/modulefiles/build_jet_intel.lua b/modulefiles/build_jet_intel.lua index e687531ac8..eb2893d3cc 100644 --- a/modulefiles/build_jet_intel.lua +++ b/modulefiles/build_jet_intel.lua @@ -5,7 +5,7 @@ the NOAA RDHPC machine Jet using Intel-2021.5.0 whatis([===[Loads libraries needed for building the UFS SRW App on Jet ]===]) -prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env/install/modulefiles/Core") +prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env-rocky8/install/modulefiles/Core") prepend_path("MODULEPATH", "/lfs4/HFIP/hfv3gfs/spack-stack/modulefiles") load("stack-intel/2021.5.0") @@ -15,7 +15,6 @@ load("cmake/3.23.1") load("srw_common") -load("prod-util/1.2.2") load("nccmp/1.9.0.1") load("nco/5.0.6") diff --git a/ush/machine/jet.yaml b/ush/machine/jet.yaml index bef698f874..93d375ee02 100644 --- a/ush/machine/jet.yaml +++ b/ush/machine/jet.yaml @@ -9,7 +9,7 @@ platform: DOMAIN_PREGEN_BASEDIR: /mnt/lfs4/HFIP/hfv3gfs/role.epic/UFS_SRW_data/develop/FV3LAM_pregen PARTITION_DEFAULT: sjet,vjet,kjet,xjet QUEUE_DEFAULT: batch - PARTITION_FCST: vjet + PARTITION_FCST: xjet QUEUE_FCST: batch PARTITION_HPSS: service QUEUE_HPSS: batch From 5f461daab2eec5ae6a31ece2f7f146e27fbc36ed Mon Sep 17 00:00:00 2001 From: EdwardSnyder-NOAA <96196752+EdwardSnyder-NOAA@users.noreply.github.com> Date: Thu, 14 Mar 2024 12:23:07 -0500 Subject: [PATCH 08/42] [develop] Expand forecast fields for metric test (#1048) This PR expands the number of forecast fields for the Skill Score metric test. The forecast length in the metric WE2E test was extended to 12 hours so that the RMSE metric can be calculated for these additional forecast fields: * Specific humidity for the full column * Temperature for the full column * Wind for the full column * Dew point, pressure, temperature, and wind at the surface level for forecast hour 12. Adding these additional forecast fields will make the skill score metric test more thorough and thus make it a more comprehensive test to compare against. Also, a change was made to the .cicd/scripts/srw_metric_example.sh script to reflect the new conda environment. 
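For reviewers unfamiliar with how the final number is produced: stat_analysis reads the per-term RMSE values and weights from STATAnalysisConfig_skill_score and collapses them into a single SS_INDEX. A rough sketch of the arithmetic with two toy terms (the RMSE values here are made up, and the per-term skill-score and index formulas are my reading of the MET User's Guide, so verify against the MET documentation before leaning on this):

    # Hypothetical forecast/reference RMSE pairs for two index terms,
    # weighted 10 and 8 as in STATAnalysisConfig_skill_score:
    awk 'BEGIN {
      ss1 = 1 - (1.8*1.8)/(2.0*2.0)   # forecast beats reference: positive skill
      ss2 = 1 - (2.6*2.6)/(2.5*2.5)   # reference beats forecast: negative skill
      avg = (10*ss1 + 8*ss2) / (10 + 8)
      printf "SS_INDEX = %.3f\n", sqrt(1 / (1 - avg))
    }'

An index above 1.0 means the forecast model wins on balance, which is why the script below only fails the check when SS_INDEX drops well under 1.00.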
--------- Co-authored-by: Parallel Works app-run user Co-authored-by: Parallel Works app-run user --- .cicd/scripts/srw_metric_example.sh | 139 ++++++++++++------ modulefiles/tasks/gaea/run_vx.local.lua | 4 +- modulefiles/tasks/orion/run_vx.local.lua | 5 +- parm/metplus/STATAnalysisConfig_skill_score | 70 ++++++--- ..._ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml | 2 +- 5 files changed, 157 insertions(+), 63 deletions(-) diff --git a/.cicd/scripts/srw_metric_example.sh b/.cicd/scripts/srw_metric_example.sh index 2018505735..45dd30c299 100755 --- a/.cicd/scripts/srw_metric_example.sh +++ b/.cicd/scripts/srw_metric_example.sh @@ -3,22 +3,45 @@ # The goal of this script is to provide an example of performing Indy-Severe-Weather test run and compare results to reference with # Skill score index that is calculated by MET Stat-Analysis Tools # -# Required: +# Required (these options are set in the Jenkins env): # WORKSPACE= # SRW_PLATFORM= # SRW_COMPILER= +# SRW_PROJECT= # # Optional: -[[ -n ${SRW_PROJECT} ]] || SRW_PROJECT="no_account" +#[[ -n ${SRW_PROJECT} ]] || SRW_PROJECT="no_account" [[ -n ${FORGIVE_CONDA} ]] || FORGIVE_CONDA=true set -e -u -x +BUILD_OPT=false +RUN_WE2E_OPT=false +RUN_STAT_ANLY_OPT=false + +if [[ $# -eq 0 ]]; then + BUILD_OPT=true + RUN_WE2E_OPT=true + RUN_STAT_ANLY_OPT=true +elif [[ $# -ge 4 ]]; then + echo "Too many arguments, expecting three or less" + exit 1 +else + for opt in "$@"; do + case $opt in + build) BUILD_OPT=true ;; + run_we2e) RUN_WE2E_OPT=true ;; + run_stat_anly) RUN_STAT_ANLY_OPT=true ;; + *) echo "Not valid option. Exiting!" ; exit 1 ;; + esac + done +fi + script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)" # Get repository root from Jenkins WORKSPACE variable if set, otherwise, set # relative to script directory. declare workspace -if [[ -n "${WORKSPACE}/${SRW_PLATFORM}" ]]; then +if [[ -d "${WORKSPACE}/${SRW_PLATFORM}" ]]; then workspace="${WORKSPACE}/${SRW_PLATFORM}" else workspace="$(cd -- "${script_dir}/../.." && pwd)" @@ -39,53 +62,85 @@ we2e_test_name="grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0" pwd -# Activate the workflow environment ... 
+# Setup the build environment +declare srw_compiler +srw_compiler=${SRW_COMPILER} source etc/lmod-setup.sh ${platform,,} module use modulefiles -module load build_${platform,,}_${SRW_COMPILER} +module load build_${platform,,}_${srw_compiler} + +# Build srw +if [[ ${BUILD_OPT} == true ]]; then + cd ${workspace}/tests + ./build.sh ${platform,,} ${srw_compiler} +fi +cd ${workspace} + +# Activate workflow environment module load wflow_${platform,,} -[[ ${FORGIVE_CONDA} == true ]] && set +e +u # Some platforms have incomplete python3 or conda support, but wouldn't necessarily block workflow tests -conda activate workflow_tools +[[ ${FORGIVE_CONDA} == true ]] && set +e +u # Some platforms have incomplete python3 or conda support, but would not necessarily block workflow tests +conda activate srw_app set -e -u -# build srw -cd ${workspace}/tests -./build.sh ${platform,,} ${SRW_COMPILER} -cd ${workspace} - -# run test -[[ -d ${we2e_experiment_base_dir} ]] && rm -rf ${we2e_experiment_base_dir} -cd ${workspace}/tests/WE2E -./run_WE2E_tests.py -t ${we2e_test_name} -m ${platform,,} -a ${SRW_PROJECT} --expt_basedir "metric_test" --exec_subdir=install_intel/exec -q +# Run test +declare srw_project +srw_project=${SRW_PROJECT} +if [[ ${RUN_WE2E_OPT} == true ]]; then + [[ -d ${we2e_experiment_base_dir} ]] && rm -rf ${we2e_experiment_base_dir} + cd ${workspace}/tests/WE2E + ./run_WE2E_tests.py -t ${we2e_test_name} -m ${platform,,} -a ${srw_project} --expt_basedir "metric_test" --exec_subdir=install_intel/exec -q +fi cd ${workspace} -# run skill-score check -[[ ! -f Indy-Severe-Weather.tgz ]] && wget https://noaa-ufs-srw-pds.s3.amazonaws.com/sample_cases/release-public-v2.1.0/Indy-Severe-Weather.tgz -[[ ! -d Indy-Severe-Weather ]] && tar xvfz Indy-Severe-Weather.tgz -[[ -f skill-score.out ]] && rm skill-score.out -# Skill score index is computed over several terms that are defined in parm/metplus/STATAnalysisConfig_skill_score. -# It is computed by aggregating the output from earlier runs of the Point-Stat and/or Grid-Stat tools over one or more cases. -# In this example, skill score index is a weighted average of 4 skill scores of RMSE statistics for wind speed, dew point temperature, -# temperature, and pressure at lowest level in the atmosphere over 6 hour lead time. -cp ${we2e_experiment_base_dir}/${we2e_test_name}/2019061500/metprd/PointStat/*.stat ${workspace}/Indy-Severe-Weather/metprd/point_stat/ -# load met and metplus -module use modulefiles/tasks/${platform,,} -module load run_vx.local -stat_analysis -config parm/metplus/STATAnalysisConfig_skill_score -lookin ${workspace}/Indy-Severe-Weather/metprd/point_stat -v 2 -out skill-score.out +# Run skill-score check +if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then + # Clear out data + rm -rf ${workspace}/Indy-Severe-Weather/ + # Check if metprd data exists locally otherwise get it from S3 + TEST_EXTRN_MDL_SOURCE_BASEDIR=$(grep TEST_EXTRN_MDL_SOURCE_BASEDIR ${workspace}/ush/machine/${SRW_PLATFORM}.yaml | awk '{print $NF}') + if [[ ! 
-d $(dirname ${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat ]] ; then + mkdir -p Indy-Severe-Weather/metprd/point_stat + cp -rp $(dirname ${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat Indy-Severe-Weather/metprd + elif [[ -f Indy-Severe-Weather.tgz ]]; then + tar xvfz Indy-Severe-Weather.tgz + else + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/sample_cases/release-public-v2.1.0/Indy-Severe-Weather.tgz + tar xvfz Indy-Severe-Weather.tgz + fi + [[ -f skill-score.txt ]] && rm skill-score.txt + # Skill score index is computed over several terms that are defined in parm/metplus/STATAnalysisConfig_skill_score. + # It is computed by aggregating the output from earlier runs of the Point-Stat and/or Grid-Stat tools over one or more cases. + # In this example, skill score index is a weighted average of 4 skill scores of RMSE statistics for wind speed, dew point temperature, + # temperature, and pressure at lowest level in the atmosphere over 6 hour lead time. + cp ${we2e_experiment_base_dir}/${we2e_test_name}/2019061500/metprd/PointStat/*.stat ${workspace}/Indy-Severe-Weather/metprd/point_stat/ + # Remove conda for Orion due to conda env conflicts + if [[ ${platform} =~ "orion" ]]; then + sed -i 's|load("conda")|--load("conda")|g' ${workspace}/modulefiles/tasks/${platform,,}/run_vx.local.lua + fi + # Load met and metplus + module use modulefiles/tasks/${platform,,} + module load run_vx.local + # Reset Orion run_vx.local file + if [[ ${platform} =~ "orion" ]]; then + sed -i 's|--load("conda")|load("conda")|g' ${workspace}/modulefiles/tasks/${platform,,}/run_vx.local.lua + fi + # Run stat_analysis + stat_analysis -config parm/metplus/STATAnalysisConfig_skill_score -lookin ${workspace}/Indy-Severe-Weather/metprd/point_stat -v 2 -out skill-score.txt -# check skill-score.out -cat skill-score.out + # check skill-score.txt + cat skill-score.txt -# get skill-score (SS_INDEX) and check if it is significantly smaller than 1.0 -# A value greater than 1.0 indicates that the forecast model outperforms the reference, -# while a value less than 1.0 indicates that the reference outperforms the forecast. -tmp_string=$( tail -2 skill-score.out | head -1 ) -SS_INDEX=$(echo $tmp_string | awk -F " " '{print $NF}') -echo "Skill Score: ${SS_INDEX}" -if [[ ${SS_INDEX} < "0.700" ]]; then - echo "Your Skill Score is way smaller than 1.00, better check before merging" - exit 1 -else - echo "Congrats! You pass check!" + # get skill-score (SS_INDEX) and check if it is significantly smaller than 1.0 + # A value greater than 1.0 indicates that the forecast model outperforms the reference, + # while a value less than 1.0 indicates that the reference outperforms the forecast. + tmp_string=$( tail -2 skill-score.txt | head -1 ) + SS_INDEX=$(echo $tmp_string | awk -F " " '{print $NF}') + echo "Skill Score: ${SS_INDEX}" + if [[ ${SS_INDEX} < "0.700" ]]; then + echo "Your Skill Score is way smaller than 1.00, better check before merging" + exit 1 + else + echo "Congrats! You pass check!" 
+ fi fi diff --git a/modulefiles/tasks/gaea/run_vx.local.lua b/modulefiles/tasks/gaea/run_vx.local.lua index cb64e9a38a..57cdfbb1cc 100644 --- a/modulefiles/tasks/gaea/run_vx.local.lua +++ b/modulefiles/tasks/gaea/run_vx.local.lua @@ -22,4 +22,6 @@ if (mode() == "unload") then unload(pathJoin("met", met_ver)) unload(pathJoin("metplus",metplus_ver)) end -load("python_srw") +load("conda") +setenv("SRW_ENV", "srw_app") +setenv("LD_PRELOAD", "/opt/cray/pe/gcc/12.2.0/snos/lib64/libstdc++.so.6") diff --git a/modulefiles/tasks/orion/run_vx.local.lua b/modulefiles/tasks/orion/run_vx.local.lua index 1fa9617365..5bafb4d46b 100644 --- a/modulefiles/tasks/orion/run_vx.local.lua +++ b/modulefiles/tasks/orion/run_vx.local.lua @@ -1,7 +1,7 @@ --[[ Compiler-specific modules are used for met and metplus libraries --]] -load("build_orion_intel") +--load("build_orion_intel") local met_ver = (os.getenv("met_ver") or "11.1.0") local metplus_ver = (os.getenv("metplus_ver") or "5.1.0") @@ -27,4 +27,5 @@ if (mode() == "unload") then end --load("ufs-pyenv") load("stack-python/3.10.8") -load("python_srw") +load("conda") +setenv("SRW_ENV", "srw_app") diff --git a/parm/metplus/STATAnalysisConfig_skill_score b/parm/metplus/STATAnalysisConfig_skill_score index 6fd64a6456..fba1106d6b 100644 --- a/parm/metplus/STATAnalysisConfig_skill_score +++ b/parm/metplus/STATAnalysisConfig_skill_score @@ -11,10 +11,19 @@ // model = ["FV3_WoFS_v0_SUBCONUS_3km_test_mem000", "FV3_GFS_v16_SUBCONUS_3km"]; -fcst_lead = [ "6", - "6", - "6", - "6" +fcst_lead = [ "6", "12", + "6", "12", + "6", "12", + "6", "12", + "12", + "12", + "12", + "12", + "12", + "12", + "12", + "12", + "12" ]; obs_lead = []; @@ -42,17 +51,35 @@ obs_init_inc = []; obs_init_exc = []; obs_init_hour = []; -fcst_var = [ "WIND", - "DPT", - "TMP", - "PRMSL" +fcst_var = [ "PRMSL", "PRMSL", + "WIND", "WIND", + "DPT", "DPT", + "TMP", "TMP", + "WIND", + "WIND", + "WIND", + "TMP", + "TMP", + "TMP", + "SPFH", + "SPFH", + "SPFH" ]; obs_var = []; -fcst_lev = [ "Z10", - "Z2", - "Z2", - "Z0" +fcst_lev = [ "Z0", "Z0", + "Z10", "Z10", + "Z2", "Z2", + "Z2", "Z2", + "P250", + "P400", + "P850", + "P250", + "P400", + "P850", + "P300", + "P500", + "P850" ]; obs_lev = []; @@ -74,10 +101,19 @@ line_type = [ "SL1L2" ]; column = [ "RMSE" ]; -weight = [ 10.0, - 10.0, - 10.0, - 10.0 +weight = [ 10.0, 8.0, + 10.0, 8.0, + 10.0, 8.0, + 10.0, 8.0, + 4.0, + 4.0, + 4.0, + 4.0, + 4.0, + 4.0, + 4.0, + 4.0, + 4.0 ]; //////////////////////////////////////////////////////////////////////////////// @@ -116,6 +152,6 @@ hss_ec_value = NA; rank_corr_flag = FALSE; vif_flag = FALSE; tmp_dir = "/tmp"; -version = "V10.1.1"; +version = "V11.1.0"; //////////////////////////////////////////////////////////////////////////////// diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml index 275500e692..120a38291e 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml @@ -12,7 +12,7 @@ workflow: PREDEF_GRID_NAME: SUBCONUS_Ind_3km DATE_FIRST_CYCL: '2019061500' DATE_LAST_CYCL: '2019061500' - FCST_LEN_HRS: 6 + FCST_LEN_HRS: 12 PREEXISTING_DIR_METHOD: rename rocoto: tasks: From 
79bbe5df7a8373266b84740610470a52244930cf Mon Sep 17 00:00:00 2001 From: EdwardSnyder-NOAA <96196752+EdwardSnyder-NOAA@users.noreply.github.com> Date: Fri, 15 Mar 2024 15:46:39 -0500 Subject: [PATCH 09/42] Update GFS v17 p8 suite to address cold bias (#1055) An SRW App user noticed an issue with the FV3_GFS_v17_p8 physics suite: surface temperatures were dropping unrealistically throughout the forecast. This PR addresses that issue by updating the FV3_GFS_v17_p8 physics suite in the parm/FV3.input.yml file. This issue was discovered in the SRW App v2.2.0, but since the FV3_GFS_v17_p8 physics suite is not officially supported for the release, the change will only go into the develop branch. --- parm/FV3.input.yml | 90 +++++++++++++++------------------------------- 1 file changed, 29 insertions(+), 61 deletions(-) diff --git a/parm/FV3.input.yml b/parm/FV3.input.yml index e2df4a2a9e..aa15625532 100644 --- a/parm/FV3.input.yml +++ b/parm/FV3.input.yml @@ -466,120 +466,88 @@ FV3_GFS_v17_p8: launch_level: 27 fv_core_nml: <<: *gfs_v15_fv_core - agrid_vel_rst: True + agrid_vel_rst: False d2_bg_k1: 0.2 - d2_bg_k2: 0.04 - delt_max: 0.002 + d2_bg_k2: 0.0 dnats: 0 do_sat_adj: False - do_vort_damp: !!python/none - full_zs_filter: !!python/none fv_sg_adj: 450 hord_dp: -5 hord_mt: 5 hord_tm: 5 - hord_vt: 5 hord_tr: 8 + hord_vt: 5 k_split: 6 + make_nh: True n_split: 6 n_sponge: 10 - nord: 2 + na_init: 1 + nord: 1 nudge_dz: False - n_zs_filter: !!python/none - range_warn: True res_latlon_dynamics: '' rf_fast: !!python/none tau: 10.0 - gfdl_cloud_microphysics_nml: - <<: *gfs_gfdl_cloud_mp - mp_time: 150.0 - reiflag: 2 - rthresh: 1.0e-06 - sedi_transport: True - tau_l2v: 225.0 - tau_v2l: 150.0 gfs_physics_nml: - <<: *gfs_v15_gfs_physics - active_gases: h2o_co2_o3_n2o_ch4_o2 - bl_mynn_edmf: 1 - bl_mynn_edmf_mom: 1 - bl_mynn_tkeadvect: True - cdmbgwd: [4.0, 0.15, 1.0, 1.0] - cplchm: False + cdmbgwd: [4.0, 0.05, 1.0, 1.0] + cnvcld: True + cnvgwd: True decfl: 10 + do_deep: True do_gsl_drag_ls_bl: False do_gsl_drag_ss: True do_gsl_drag_tofd: False - do_myjpbl: !!python/none - do_myjsfc: !!python/none do_mynnedmf: False do_mynnsfclay: False - do_RRTMGP: False + do_tofd: False + do_ugwp: False do_ugwp_v0: True - do_ugwp_v0_nst_only: False do_ugwp_v0_orog_only: False + do_ugwp_v0_nst_only: False do_ugwp_v1: False do_ugwp_v1_orog_only: False - do_ysu: !!python/none - doGP_cldoptics_LUT: False - doGP_lwscat: False - dt_inner: 150 - frac_grid: True + dt_inner: 150.0 + fhlwr: 1200.0 + fhswr: 1200.0 + frac_grid: False gwd_opt: 2 - hybedmf: False iaer: 1011 ialb: 2 - iau_inc_files: !!python/none - icloud_bl: 1 icliq_sw: 2 iems: 2 - imp_physics: 8 + imfdeepcnv: 2 + imfshalcnv: 2 iopt_alb: 1 + iopt_btr: 1 iopt_crs: 2 iopt_dveg: 4 + iopt_frz: 1 + iopt_inf: 1 iopt_rad: 3 + iopt_run: 1 iopt_sfc: 3 + iopt_snf: 4 iopt_stc: 3 + iopt_tbot: 2 iovr: 3 isatmedmf: 1 - ldiag_ugwp: !!python/none + ldiag_ugwp: False + lseaspray: True lgfdlmprad: False + lheatstrg: False lradar: False - lseaspray: True lsm: 2 - lsoil: 4 lsoil_lsm: 4 ltaerosol: False - lw_file_clouds: rrtmgp-cloud-optics-coeffs-lw.nc - lw_file_gas: rrtmgp-data-lw-g128-210809.nc min_lakeice: 0.15 min_seaice: 0.15 - nsfullradar_diag: !!python/none - prautco: [0.00015, 0.00015] - psautco: [0.0008, 0.0005] qdiag3d: False ras: False - rrtmgp_nBandsLW: 16 - rrtmgp_nBandsSW: 14 - rrtmgp_nGptsLW: 128 - rrtmgp_nGptsSW: 112 satmedmf: True sedi_semi: True - sfclay_compute_flux: !!python/none - shinhong: !!python/none - sw_file_clouds: rrtmgp-cloud-optics-coeffs-sw.nc - 
sw_file_gas: rrtmgp-data-sw-g112-210809.nc - ttendlim: -999 - xkzminv: !!python/none - xkzm_m: !!python/none - xkzm_h: !!python/none + shal_cnv: True mpp_io_nml: deflate_level: 1 shuffle: 1 namsfc: - fsicl: 0 - fsics: 0 - landice: False - ldebug: False surf_map_nml: !!python/none From 6010effc7141ca1bca561ab82787dacdd7465ebf Mon Sep 17 00:00:00 2001 From: Christina Holt <56881914+christinaholtNOAA@users.noreply.github.com> Date: Wed, 20 Mar 2024 07:15:12 -0600 Subject: [PATCH 10/42] [develop] Use uwtools instead of set_namelist (#1054) Continues the integration of the uwtools package. In this PR, I've done the following: * Call the UW config tool instead of set_namelist using the uwtools CLI in bash scripts and API in Python scripts * Lint the ush/set_fv3nml*.py files * Update uwtools to the latest release version --------- Co-authored-by: michael.lueken --- .github/workflows/python_tests.yaml | 2 + environment.yml | 2 +- modulefiles/build_hera_gnu.lua | 5 +- modulefiles/build_hera_intel.lua | 5 +- parm/FV3.input.yml | 123 +++--- parm/fixed_files_mapping.yaml | 29 -- scripts/exregional_make_grid.sh | 50 +-- scripts/exregional_make_ics.sh | 101 +++-- scripts/exregional_make_lbcs.sh | 80 ++-- scripts/exregional_run_fcst.sh | 8 +- ...onal_run_met_genensprod_or_ensemblestat.sh | 2 +- ...gional_run_met_gridstat_or_pointstat_vx.sh | 2 +- ...un_met_gridstat_or_pointstat_vx_ensmean.sh | 2 +- ...un_met_gridstat_or_pointstat_vx_ensprob.sh | 2 +- scripts/exregional_run_met_pb2nc_obs.sh | 2 +- scripts/exregional_run_met_pcpcombine.sh | 2 +- ....py => test_set_fv3nml_ens_stoch_seeds.py} | 34 +- ...=> test_set_fv3nml_sfc_climo_filenames.py} | 24 +- ush/generate_FV3LAM_wflow.py | 51 ++- ...seeds.py => set_fv3nml_ens_stoch_seeds.py} | 94 ++--- ...s.py => set_fv3nml_sfc_climo_filenames.py} | 106 +++--- ush/set_namelist.py | 355 ------------------ ush/update_input_nml.py | 142 +++---- 23 files changed, 369 insertions(+), 854 deletions(-) rename tests/test_python/{test_set_FV3nml_ens_stoch_seeds.py => test_set_fv3nml_ens_stoch_seeds.py} (68%) rename tests/test_python/{test_set_FV3nml_sfc_climo_filenames.py => test_set_fv3nml_sfc_climo_filenames.py} (69%) rename ush/{set_FV3nml_ens_stoch_seeds.py => set_fv3nml_ens_stoch_seeds.py} (64%) rename ush/{set_FV3nml_sfc_climo_filenames.py => set_fv3nml_sfc_climo_filenames.py} (53%) delete mode 100755 ush/set_namelist.py diff --git a/.github/workflows/python_tests.yaml b/.github/workflows/python_tests.yaml index 0e71f8d72d..fb0de16910 100644 --- a/.github/workflows/python_tests.yaml +++ b/.github/workflows/python_tests.yaml @@ -41,6 +41,8 @@ jobs: pylint --ignore-imports=yes tests/test_python/ pylint ush/create_*.py pylint ush/generate_FV3LAM_wflow.py + pylint ush/set_fv3nml*.py + pylint ush/update_input_nml.py - name: Run python unittests run: | diff --git a/environment.yml b/environment.yml index c574df5e23..faeb19d466 100644 --- a/environment.yml +++ b/environment.yml @@ -5,4 +5,4 @@ channels: dependencies: - pylint=2.17* - pytest=7.2* - - uwtools=1.0.0 + - uwtools=2.1* diff --git a/modulefiles/build_hera_gnu.lua b/modulefiles/build_hera_gnu.lua index 90bd671b5a..d5f78f397b 100644 --- a/modulefiles/build_hera_gnu.lua +++ b/modulefiles/build_hera_gnu.lua @@ -5,10 +5,7 @@ the NOAA RDHPC machine Hera using GNU 9.2.0 whatis([===[Loads libraries needed for building the UFS SRW App on Hera using GNU 9.2.0 ]===]) --- When Hera switches from CentOS to Rocky, replace line withh correct path to spack-stack --- If you want to use Rocky OS now, use line below 
---prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env-rocky8/install/modulefiles/Core") -prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env-noavx512/install/modulefiles/Core") +prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env-rocky8/install/modulefiles/Core") prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulefiles") load("stack-gcc/9.2.0") diff --git a/modulefiles/build_hera_intel.lua b/modulefiles/build_hera_intel.lua index 314fd89183..2121d303dc 100644 --- a/modulefiles/build_hera_intel.lua +++ b/modulefiles/build_hera_intel.lua @@ -8,10 +8,7 @@ whatis([===[Loads libraries needed for building the UFS SRW App on Hera ]===]) prepend_path("MODULEPATH","/contrib/sutils/modulefiles") load("sutils") --- When Hera switches from CentOS to Rocky, replace line withh correct path to spack-stack --- If you want to use Rocky OS now, use line below ---prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env-rocky8/install/modulefiles/Core") -prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env-noavx512/install/modulefiles/Core") +prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env-rocky8/install/modulefiles/Core") prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulefiles") stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0" diff --git a/parm/FV3.input.yml b/parm/FV3.input.yml index aa15625532..efb6c85f5b 100644 --- a/parm/FV3.input.yml +++ b/parm/FV3.input.yml @@ -4,12 +4,7 @@ # parm/input.nml.FV3 # # to obtain the namelist for each physics suite that the SRW App can -# run with. To build a namelist for one of these configurations, use -# the Python helper script -# -# ush/set_namelist.py -# -# and provide this file and the desired section via the -c option. +# run with. 
FV3_RRFS_v1beta: @@ -83,11 +78,11 @@ FV3_HRRR: <<: *RRFS_v1beta_phys cdmbgwd: [3.5, 1.0] do_mynnsfclay: True - do_sfcperts: !!python/none + do_sfcperts: null gwd_opt: 3 do_gsl_drag_ss: True do_gsl_drag_tofd: True - do_gsl_drag_ls_bl: True + do_gsl_drag_ls_bl: True iaer: 5111 icliq_sw: 2 iovr: 3 @@ -102,8 +97,8 @@ FV3_HRRR: mosaic_lu: 0 mosaic_soil: 0 thsfc_loc: False - nst_anl: - nstf_name: + nst_anl: null + nstf_name: null FV3_RAP: fv_core_nml: @@ -112,7 +107,7 @@ FV3_RAP: <<: *RRFS_v1beta_phys cdmbgwd: [3.5, 1.0] do_mynnsfclay: True - do_sfcperts: !!python/none + do_sfcperts: null gwd_opt: 3 do_gsl_drag_ss: True do_gsl_drag_tofd: True @@ -140,40 +135,40 @@ FV3_GFS_2017_gfdlmp: k_split: 6 n_split: 6 nord: 2 - nord_zs_filter: !!python/none + nord_zs_filter: null range_warn: False vtdm4: 0.075 gfs_physics_nml: &gfs_2017_gfdlmp_phys avg_max_length: 3600.0 - bl_mynn_tkeadvect: !!python/none - bl_mynn_edmf: !!python/none - bl_mynn_edmf_mom: !!python/none + bl_mynn_tkeadvect: null + bl_mynn_edmf: null + bl_mynn_edmf_mom: null cdmbgwd: [3.5, 0.01] - cplflx: !!python/none + cplflx: null do_deep: False - do_mynnedmf: !!python/none - do_mynnsfclay: !!python/none + do_mynnedmf: null + do_mynnsfclay: null fhcyc: 0.0 fhlwr: 3600.0 fhswr: 3600.0 hybedmf: True - icloud_bl: !!python/none + icloud_bl: null imfdeepcnv: 2 imfshalcnv: 2 imp_physics: 11 lgfdlmprad: True - lheatstrg: !!python/none - lndp_type: !!python/none - lsm: !!python/none - lsoil: !!python/none - lsoil_lsm: !!python/none - ltaerosol: !!python/none - n_var_lndp: !!python/none + lheatstrg: null + lndp_type: null + lsm: null + lsoil: null + lsoil_lsm: null + ltaerosol: null + n_var_lndp: null oz_phys: True oz_phys_2015: False - satmedmf: !!python/none + satmedmf: null shal_cnv: True - ttendlim: !!python/none + ttendlim: null gfdl_cloud_microphysics_nml: &gfs_gfdl_cloud_mp c_cracw: 0.8 c_paut: 0.5 @@ -272,7 +267,7 @@ FV3_GFS_v15p2: kord_wz: 9 n_split: 8 n_sponge: 30 - nord_zs_filter: !!python/none + nord_zs_filter: null nudge_qv: True range_warn: False rf_cutoff: 750.0 @@ -283,16 +278,16 @@ FV3_GFS_v15p2: tau_l2v: 225.0 tau_v2l: 150.0 gfs_physics_nml: &gfs_v15_gfs_physics - bl_mynn_edmf: !!python/none - bl_mynn_edmf_mom: !!python/none - bl_mynn_tkeadvect: !!python/none + bl_mynn_edmf: null + bl_mynn_edmf_mom: null + bl_mynn_tkeadvect: null cnvcld: True cnvgwd: True - cplflx: !!python/none + cplflx: null do_myjpbl: False do_myjsfc: False - do_mynnedmf: !!python/none - do_mynnsfclay: !!python/none + do_mynnedmf: null + do_mynnsfclay: null do_tofd: False do_ugwp: False do_ysu: False @@ -300,12 +295,12 @@ FV3_GFS_v15p2: fhlwr: 3600.0 fhswr: 3600.0 hybedmf: True - iau_delthrs: !!python/none - iaufhrs: !!python/none + iau_delthrs: null + iaufhrs: null imfdeepcnv: 2 imfshalcnv: 2 imp_physics: 11 - icloud_bl: !!python/none + icloud_bl: null iopt_alb: 2 iopt_btr: 1 iopt_crs: 1 @@ -321,28 +316,28 @@ FV3_GFS_v15p2: iopt_trs: 2 ldiag_ugwp: False lgfdlmprad: True - lradar: !!python/none + lradar: null lsm: 1 - lsoil: !!python/none - lsoil_lsm: !!python/none - ltaerosol: !!python/none + lsoil: null + lsoil_lsm: null + ltaerosol: null shal_cnv: True shinhong: False - ttendlim: !!python/none + ttendlim: null xkzm_h: 1.0 xkzm_m: 1.0 xkzminv: 0.3 namsfc: landice: True ldebug: False - surf_map_nml: + surf_map_nml: null FV3_GFS_v15_thompson_mynn_lam3km: atmos_model_nml: avg_max_length: 3600.0 fv_core_nml: agrid_vel_rst: True - full_zs_filter: !!python/none + full_zs_filter: null n_sponge: 9 npz_type: '' rf_fast: False @@ -382,20 +377,20 @@ 
FV3_GFS_v15_thompson_mynn_lam3km: iopt_snf: 4 iopt_stc: 1 iopt_tbot: 2 - iopt_trs: !!python/none + iopt_trs: null iovr: 3 ldiag_ugwp: False lgfdlmprad: False lsm: 1 - lsoil: !!python/none - lsoil_lsm: !!python/none + lsoil: null + lsoil_lsm: null ltaerosol: False print_diff_pgr: True - sfclay_compute_flux: !!python/none + sfclay_compute_flux: null xkzminv: 0.3 xkzm_m: 1.0 xkzm_h: 1.0 - surf_map_nml: !!python/none + surf_map_nml: null FV3_GFS_v16: cires_ugwp_nml: @@ -419,7 +414,7 @@ FV3_GFS_v16: na_init: 0 nudge_dz: False res_latlon_dynamics: '' - rf_fast: !!python/none + rf_fast: null tau: 10.0 gfdl_cloud_microphysics_nml: <<: *gfs_gfdl_cloud_mp @@ -431,10 +426,10 @@ FV3_GFS_v16: gfs_physics_nml: <<: *gfs_v15_gfs_physics cdmbgwd: [4.0, 0.15, 1.0, 1.0] - do_myjpbl: !!python/none - do_myjsfc: !!python/none + do_myjpbl: null + do_myjsfc: null do_tofd: True - do_ysu: !!python/none + do_ysu: null hybedmf: False iaer: 5111 icliq_sw: 2 @@ -443,23 +438,23 @@ FV3_GFS_v16: isatmedmf: 1 lgfdlmprad: True lheatstrg: True - lndp_type: !!python/none + lndp_type: null lsoil: 4 - n_var_lndp: !!python/none + n_var_lndp: null prautco: [0.00015, 0.00015] psautco: [0.0008, 0.0005] satmedmf: True - shinhong: !!python/none - xkzminv: !!python/none - xkzm_m: !!python/none - xkzm_h: !!python/none + shinhong: null + xkzminv: null + xkzm_m: null + xkzm_h: null mpp_io_nml: deflate_level: 1 shuffle: 1 namsfc: landice: True ldebug: False - surf_map_nml: !!python/none + surf_map_nml: null FV3_GFS_v17_p8: cires_ugwp_nml: @@ -485,7 +480,7 @@ FV3_GFS_v17_p8: nord: 1 nudge_dz: False res_latlon_dynamics: '' - rf_fast: !!python/none + rf_fast: null tau: 10.0 gfs_physics_nml: cdmbgwd: [4.0, 0.05, 1.0, 1.0] @@ -548,6 +543,4 @@ FV3_GFS_v17_p8: mpp_io_nml: deflate_level: 1 shuffle: 1 - namsfc: - surf_map_nml: !!python/none - + surf_map_nml: null diff --git a/parm/fixed_files_mapping.yaml b/parm/fixed_files_mapping.yaml index 90fd1870a4..54ddd41a81 100644 --- a/parm/fixed_files_mapping.yaml +++ b/parm/fixed_files_mapping.yaml @@ -139,35 +139,6 @@ fixed_files: !join_str ["FNSMCC | ",*FNSMCC], !join_str ["FNMSKH | ",*FNMSKH] ] - #"FNZORC | $FNZORC", - - # - #----------------------------------------------------------------------- - # - # FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING: - # This array is used to set some of the namelist variables in the forecast - # model's namelist file that represent the relative or absolute paths of - # various fixed files (the first column of the array, where columns are - # delineated by the pipe symbol "|") to the full paths to surface climatology - # files (on the native FV3-LAM grid) in the FIXlam directory derived from - # the corresponding surface climatology fields (the second column of the - # array). - # - #----------------------------------------------------------------------- - # - FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING: [ - "FNALBC | snowfree_albedo", - "FNALBC2 | facsf", - "FNTG3C | substrate_temperature", - "FNVEGC | vegetation_greenness", - "FNVETC | vegetation_type", - "FNSOTC | soil_type", - "FNVMNC | vegetation_greenness", - "FNVMXC | vegetation_greenness", - "FNSLPC | slope_type", - "FNABSC | maximum_snow_albedo" - ] - # #----------------------------------------------------------------------- diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 755e1c95c4..1f95ea8f91 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -266,29 +266,31 @@ generation executable (exec_fp): # namelist file. 
# settings=" -'regional_grid_nml': { - 'plon': ${LON_CTR}, - 'plat': ${LAT_CTR}, - 'delx': ${DEL_ANGLE_X_SG}, - 'dely': ${DEL_ANGLE_Y_SG}, - 'lx': ${NEG_NX_OF_DOM_WITH_WIDE_HALO}, - 'ly': ${NEG_NY_OF_DOM_WITH_WIDE_HALO}, - 'pazi': ${PAZI}, - } +'regional_grid_nml': + 'plon': ${LON_CTR} + 'plat': ${LAT_CTR} + 'delx': ${DEL_ANGLE_X_SG} + 'dely': ${DEL_ANGLE_Y_SG} + 'lx': ${NEG_NX_OF_DOM_WITH_WIDE_HALO} + 'ly': ${NEG_NY_OF_DOM_WITH_WIDE_HALO} + 'pazi': ${PAZI} " -# -# Call the python script to create the namelist file. -# - ${USHdir}/set_namelist.py -q -u "$settings" -o ${rgnl_grid_nml_fp} || \ - print_err_msg_exit "\ -Call to python script set_namelist.py to set the variables in the -regional_esg_grid namelist file failed. Parameters passed to this script -are: - Full path to output namelist file: - rgnl_grid_nml_fp = \"${rgnl_grid_nml_fp}\" - Namelist settings specified on command line (these have highest precedence): - settings = -$settings" + + (cat << EOF +$settings +EOF +) | uw config realize \ + --input-format yaml \ + -o ${rgnl_grid_nml_fp} \ + -v \ + + err=$? + if [ $err -ne 0 ]; then + print_err_msg_exit "\ + Error creating regional_esg_grid namelist. + Settings for input are: + $settings" + fi # # Call the executable that generates the grid file. # @@ -611,7 +613,7 @@ failed." # #----------------------------------------------------------------------- # -# Call a function (set_FV3nml_sfc_climo_filenames) to set the values of +# Call a function (set_fv3nml_sfc_climo_filenames) to set the values of # those variables in the forecast model's namelist file that specify the # paths to the surface climatology files. These files will either already # be avaialable in a user-specified directory (SFC_CLIMO_DIR) or will be @@ -620,7 +622,7 @@ failed." # #----------------------------------------------------------------------- # -python3 $USHdir/set_FV3nml_sfc_climo_filenames.py \ +python3 $USHdir/set_fv3nml_sfc_climo_filenames.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ || print_err_msg_exit "\ Call to function to set surface climatology file names in the FV3 namelist diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index b42c086624..60852095ee 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -546,64 +546,60 @@ fi # IMPORTANT: # If we want a namelist variable to be removed from the namelist file, # in the "settings" variable below, we need to set its value to the -# string "null". This is equivalent to setting its value to -# !!python/none -# in the base namelist file specified by FV3_NML_BASE_SUITE_FP or the -# suite-specific yaml settings file specified by FV3_NML_YAML_CONFIG_FP. -# -# It turns out that setting the variable to an empty string also works -# to remove it from the namelist! Which is better to use?? +# string "null". 
# settings=" -'config': { - 'fix_dir_target_grid': ${FIXlam}, - 'mosaic_file_target_grid': ${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo$((10#${NH4})).nc, - 'orog_dir_target_grid': ${FIXlam}, - 'orog_files_target_grid': ${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo$((10#${NH4})).nc, - 'vcoord_file_target_grid': ${VCOORD_FILE}, - 'varmap_file': ${PARMdir}/ufs_utils/varmap_tables/${varmap_file}, - 'data_dir_input_grid': ${extrn_mdl_staging_dir}, - 'atm_files_input_grid': ${fn_atm}, - 'sfc_files_input_grid': ${fn_sfc}, - 'grib2_file_input_grid': \"${fn_grib2}\", - 'cycle_mon': $((10#${mm})), - 'cycle_day': $((10#${dd})), - 'cycle_hour': $((10#${hh})), - 'convert_atm': True, - 'convert_sfc': True, - 'convert_nst': ${convert_nst}, - 'regional': 1, - 'halo_bndy': $((10#${NH4})), - 'halo_blend': $((10#${HALO_BLEND})), - 'input_type': ${input_type}, - 'external_model': ${external_model}, - 'tracers_input': ${tracers_input}, - 'tracers': ${tracers}, - 'nsoill_out': $((10#${nsoill_out})), - 'geogrid_file_input_grid': ${geogrid_file_input_grid}, - 'vgtyp_from_climo': ${vgtyp_from_climo}, - 'sotyp_from_climo': ${sotyp_from_climo}, - 'vgfrc_from_climo': ${vgfrc_from_climo}, - 'minmax_vgfrc_from_climo': ${minmax_vgfrc_from_climo}, - 'lai_from_climo': ${lai_from_climo}, - 'tg3_from_soil': ${tg3_from_soil}, - 'thomp_mp_climo_file': ${thomp_mp_climo_file}, -} +'config': + 'fix_dir_target_grid': ${FIXlam} + 'mosaic_file_target_grid': ${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo$((10#${NH4})).nc + 'orog_dir_target_grid': ${FIXlam} + 'orog_files_target_grid': ${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo$((10#${NH4})).nc + 'vcoord_file_target_grid': ${VCOORD_FILE} + 'varmap_file': ${PARMdir}/ufs_utils/varmap_tables/${varmap_file} + 'data_dir_input_grid': ${extrn_mdl_staging_dir} + 'atm_files_input_grid': ${fn_atm} + 'sfc_files_input_grid': ${fn_sfc} + 'grib2_file_input_grid': \"${fn_grib2}\" + 'cycle_mon': $((10#${mm})) + 'cycle_day': $((10#${dd})) + 'cycle_hour': $((10#${hh})) + 'convert_atm': True + 'convert_sfc': True + 'convert_nst': ${convert_nst} + 'regional': 1 + 'halo_bndy': $((10#${NH4})) + 'halo_blend': $((10#${HALO_BLEND})) + 'input_type': ${input_type} + 'external_model': ${external_model} + 'tracers_input': ${tracers_input} + 'tracers': ${tracers} + 'nsoill_out': $((10#${nsoill_out})) + 'geogrid_file_input_grid': ${geogrid_file_input_grid} + 'vgtyp_from_climo': ${vgtyp_from_climo} + 'sotyp_from_climo': ${sotyp_from_climo} + 'vgfrc_from_climo': ${vgfrc_from_climo} + 'minmax_vgfrc_from_climo': ${minmax_vgfrc_from_climo} + 'lai_from_climo': ${lai_from_climo} + 'tg3_from_soil': ${tg3_from_soil} + 'thomp_mp_climo_file': ${thomp_mp_climo_file} " -# -# Call the python script to create the namelist file. -# + + nml_fn="fort.41" -${USHdir}/set_namelist.py -q -u "$settings" -o ${nml_fn} + +(cat << EOF +$settings +EOF +) | uw config realize \ + --input-format yaml \ + -o ${nml_fn} \ + --output-format nml\ + -v \ + err=$? if [ $err -ne 0 ]; then - message_txt="Call to python script set_namelist.py to set the variables -in the namelist file read in by the ${exec_fn} executable failed. Parameters -passed to this script are: - Name of output namelist file: - nml_fn = \"${nml_fn}\" - Namelist settings specified on command line (these have highest precedence): - settings = + message_txt="Error creating namelist read by ${exec_fn} failed. 
+ Settings for input are: $settings" if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_exit "${message_txt}" @@ -611,6 +607,7 @@ $settings" print_err_msg_exit "${message_txt}" fi fi + # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index 695af1b409..fcde8e6f46 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -467,53 +467,47 @@ FORTRAN namelist file has not specified for this external LBC model (EXTRN_MDL_N # IMPORTANT: # If we want a namelist variable to be removed from the namelist file, # in the "settings" variable below, we need to set its value to the -# string "null". This is equivalent to setting its value to -# !!python/none -# in the base namelist file specified by FV3_NML_BASE_SUITE_FP or the -# suite-specific yaml settings file specified by FV3_NML_YAML_CONFIG_FP. -# -# It turns out that setting the variable to an empty string also works -# to remove it from the namelist! Which is better to use?? -# -settings=" -'config': { - 'fix_dir_target_grid': ${FIXlam}, - 'mosaic_file_target_grid': ${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo$((10#${NH4})).nc, - 'orog_dir_target_grid': ${FIXlam}, - 'orog_files_target_grid': ${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo$((10#${NH4})).nc, - 'vcoord_file_target_grid': ${VCOORD_FILE}, - 'varmap_file': ${PARMdir}/ufs_utils/varmap_tables/${varmap_file}, - 'data_dir_input_grid': ${extrn_mdl_staging_dir}, - 'atm_files_input_grid': ${fn_atm}, - 'grib2_file_input_grid': \"${fn_grib2}\", - 'cycle_mon': $((10#${mm})), - 'cycle_day': $((10#${dd})), - 'cycle_hour': $((10#${hh})), - 'convert_atm': True, - 'regional': 2, - 'halo_bndy': $((10#${NH4})), - 'halo_blend': $((10#${HALO_BLEND})), - 'input_type': ${input_type}, - 'external_model': ${external_model}, - 'tracers_input': ${tracers_input}, - 'tracers': ${tracers}, - 'thomp_mp_climo_file': ${thomp_mp_climo_file}, -} +# string "null". +# + settings=" +'config': + 'fix_dir_target_grid': ${FIXlam} + 'mosaic_file_target_grid': ${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo$((10#${NH4})).nc + 'orog_dir_target_grid': ${FIXlam} + 'orog_files_target_grid': ${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo$((10#${NH4})).nc + 'vcoord_file_target_grid': ${VCOORD_FILE} + 'varmap_file': ${PARMdir}/ufs_utils/varmap_tables/${varmap_file} + 'data_dir_input_grid': ${extrn_mdl_staging_dir} + 'atm_files_input_grid': ${fn_atm} + 'grib2_file_input_grid': \"${fn_grib2}\" + 'cycle_mon': $((10#${mm})) + 'cycle_day': $((10#${dd})) + 'cycle_hour': $((10#${hh})) + 'convert_atm': True + 'regional': 2 + 'halo_bndy': $((10#${NH4})) + 'halo_blend': $((10#${HALO_BLEND})) + 'input_type': ${input_type} + 'external_model': ${external_model} + 'tracers_input': ${tracers_input} + 'tracers': ${tracers} + 'thomp_mp_climo_file': ${thomp_mp_climo_file} " -# -# Call the python script to create the namelist file. -# + nml_fn="fort.41" - ${USHdir}/set_namelist.py -q -u "$settings" -o ${nml_fn} + (cat << EOF +$settings +EOF +) | uw config realize \ + --input-format yaml \ + -o ${nml_fn} \ + --output-format nml \ + -v \ + export err=$? if [ $err -ne 0 ]; then - message_txt="Call to python script set_namelist.py to set the variables -in the namelist file read in by the ${exec_fn} executable failed. 
Parameters -passed to this script are: - Name of output namelist file: - nml_fn = \"${nml_fn}\" - Namelist settings specified on command line (these have highest precedence): - settings = + message_txt="Error creating namelist read by ${exec_fn} failed. + Settings for input are: $settings" if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_exit "${message_txt}" diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index c5519d923c..723086b077 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -474,7 +474,7 @@ fi #----------------------------------------------------------------------- # if ([ "$STOCH" == "TRUE" ] && [ "${DO_ENSEMBLE}" = "TRUE" ]); then - python3 $USHdir/set_FV3nml_ens_stoch_seeds.py \ + python3 $USHdir/set_fv3nml_ens_stoch_seeds.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ --cdate "$CDATE" || print_err_msg_exit "\ Call to function to create the ensemble-based namelist for the current @@ -492,8 +492,7 @@ fi # if [ "${CPL_AQM}" = "TRUE" ] && [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then python3 $USHdir/update_input_nml.py \ - --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ - --run_dir "${DATA}" \ + --namelist "${DATA}/${FV3_NML_FN}" \ --aqm_na_13km || print_err_msg_exit "\ Call to function to update the FV3 input.nml file for air quality modeling using AQM_NA_13km for the current cycle's (cdate) run directory (DATA) failed: @@ -520,8 +519,7 @@ if [ "${DO_FCST_RESTART}" = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then # Update FV3 input.nml for restart python3 $USHdir/update_input_nml.py \ - --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ - --run_dir "${DATA}" \ + --namelist "${DATA}/${FV3_NML_FN}" \ --restart export err=$? if [ $err -ne 0 ]; then diff --git a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh index 5bbe61f530..fe0e119b19 100755 --- a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh +++ b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh @@ -399,7 +399,7 @@ EOF uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ - -v \ + --verbose \ --values-file "${tmpfile}" err=$? diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh index 1fa249ecf8..7eb1ce4605 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh @@ -400,7 +400,7 @@ EOF uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ - -v \ + --verbose \ --values-file "${tmpfile}" err=$? diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh index 067c24ec07..458dcec33f 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh @@ -358,7 +358,7 @@ EOF uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ - -v \ + --verbose \ --values-file "${tmpfile}" err=$? 
diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh index e042b68bfe..fc735845c9 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh @@ -310,7 +310,7 @@ EOF uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ - -v \ + --verbose \ --values-file "${tmpfile}" err=$? diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh index 10d1beba4d..92d39102fc 100755 --- a/scripts/exregional_run_met_pb2nc_obs.sh +++ b/scripts/exregional_run_met_pb2nc_obs.sh @@ -284,7 +284,7 @@ EOF uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ - -v \ + --verbose \ --values-file "${tmpfile}" err=$? diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh index 4a9222707a..7eabe02901 100755 --- a/scripts/exregional_run_met_pcpcombine.sh +++ b/scripts/exregional_run_met_pcpcombine.sh @@ -362,7 +362,7 @@ EOF uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ - -v \ + --verbose \ --values-file "${tmpfile}" err=$? diff --git a/tests/test_python/test_set_FV3nml_ens_stoch_seeds.py b/tests/test_python/test_set_fv3nml_ens_stoch_seeds.py similarity index 68% rename from tests/test_python/test_set_FV3nml_ens_stoch_seeds.py rename to tests/test_python/test_set_fv3nml_ens_stoch_seeds.py index f87d57d53b..17bf74c04b 100644 --- a/tests/test_python/test_set_FV3nml_ens_stoch_seeds.py +++ b/tests/test_python/test_set_fv3nml_ens_stoch_seeds.py @@ -1,4 +1,4 @@ -""" Tests for set_FV3nml_ens_stoch_seeds.py """ +""" Tests for set_fv3nml_ens_stoch_seeds.py """ #pylint: disable=invalid-name @@ -15,18 +15,17 @@ set_env_var, ) -from set_FV3nml_ens_stoch_seeds import set_FV3nml_ens_stoch_seeds +from set_fv3nml_ens_stoch_seeds import set_fv3nml_ens_stoch_seeds class Testing(unittest.TestCase): """ Define the tests """ - def test_set_FV3nml_ens_stoch_seeds(self): + def test_set_fv3nml_ens_stoch_seeds(self): """ Call the function and make sure it doesn't fail""" os.chdir(self.mem_dir) - set_FV3nml_ens_stoch_seeds(cdate=self.cdate) + set_fv3nml_ens_stoch_seeds(cdate=self.cdate, expt_config=self.config) def setUp(self): define_macos_utilities() - set_env_var("DEBUG", True) set_env_var("VERBOSE", True) self.cdate = datetime(2021, 1, 1) test_dir = os.path.dirname(os.path.abspath(__file__)) @@ -55,17 +54,22 @@ def setUp(self): ) - set_env_var("USHdir", USHdir) set_env_var("ENSMEM_INDX", 2) - set_env_var("FV3_NML_FN", "input.nml") - set_env_var("FV3_NML_FP", os.path.join(self.mem_dir, "input.nml")) - set_env_var("DO_SHUM", True) - set_env_var("DO_SKEB", True) - set_env_var("DO_SPPT", True) - set_env_var("DO_SPP", True) - set_env_var("DO_LSM_SPP", True) - ISEED_SPP = [4, 5, 6, 7, 8] - set_env_var("ISEED_SPP", ISEED_SPP) + + self.config = { + "workflow": { + "VERBOSE": True, + "FV3_NML_FN": "input.nml", + }, + "global": { + "DO_SHUM": True, + "DO_SKEB": True, + "DO_SPPT": True, + "DO_SPP": True, + "DO_LSM_SPP": True, + "ISEED_SPP": [4, 5, 6, 7, 8], + }, + } def tearDown(self): self.tmp_dir.cleanup() diff --git a/tests/test_python/test_set_FV3nml_sfc_climo_filenames.py b/tests/test_python/test_set_fv3nml_sfc_climo_filenames.py similarity index 69% rename from tests/test_python/test_set_FV3nml_sfc_climo_filenames.py rename to tests/test_python/test_set_fv3nml_sfc_climo_filenames.py index 131af70506..b0daf50fea 100644 --- 
a/tests/test_python/test_set_FV3nml_sfc_climo_filenames.py +++ b/tests/test_python/test_set_fv3nml_sfc_climo_filenames.py @@ -1,4 +1,4 @@ -""" Tests for set_FV3nml_sfc_climo_filenames.py """ +""" Tests for set_fv3nml_sfc_climo_filenames.py """ #pylint: disable=invalid-name @@ -12,13 +12,13 @@ mkdir_vrfy, set_env_var, ) -from set_FV3nml_sfc_climo_filenames import set_FV3nml_sfc_climo_filenames +from set_fv3nml_sfc_climo_filenames import set_fv3nml_sfc_climo_filenames class Testing(unittest.TestCase): """ Define the tests """ - def test_set_FV3nml_sfc_climo_filenames(self): + def test_set_fv3nml_sfc_climo_filenames(self): """ Call the function and don't raise an Exception. """ - set_FV3nml_sfc_climo_filenames() + set_fv3nml_sfc_climo_filenames(config=self.config) def setUp(self): define_macos_utilities() @@ -42,13 +42,15 @@ def setUp(self): os.path.join(PARMdir, "input.nml.FV3"), os.path.join(EXPTDIR, "input.nml"), ) - set_env_var("PARMdir", PARMdir) - set_env_var("EXPTDIR", EXPTDIR) - set_env_var("FIXlam", FIXlam) - set_env_var("DO_ENSEMBLE", False) - set_env_var("CRES", "C3357") - set_env_var("RUN_ENVIR", "nco") - set_env_var("FV3_NML_FP", os.path.join(EXPTDIR, "input.nml")) + self.config = { + "CRES": "C3357", + "DO_ENSEMBLE": False, + "EXPTDIR": EXPTDIR, + "FIXlam": FIXlam, + "FV3_NML_FP": os.path.join(EXPTDIR, "input.nml"), + "PARMdir": PARMdir, + "RUN_ENVIR": "nco", + } def tearDown(self): self.tmp_dir.cleanup() diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index a678a61132..ec2b95c3f3 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -14,6 +14,8 @@ from subprocess import STDOUT, CalledProcessError, check_output from textwrap import dedent +from uwtools.api.config import get_nml_config, get_yaml_config, realize + from python_utils import ( log_info, import_vars, @@ -30,9 +32,8 @@ ) from setup import setup -from set_FV3nml_sfc_climo_filenames import set_FV3nml_sfc_climo_filenames +from set_fv3nml_sfc_climo_filenames import set_fv3nml_sfc_climo_filenames from get_crontab_contents import add_crontab_line -from set_namelist import set_namelist from check_python_version import check_python_version # pylint: disable=too-many-locals,too-many-branches, too-many-statements @@ -506,24 +507,23 @@ def generate_FV3LAM_wflow( # # ----------------------------------------------------------------------- # - # Call the set_namelist.py script to create a new FV3 namelist file (full - # path specified by FV3_NML_FP) using the file FV3_NML_BASE_SUITE_FP as - # the base (i.e. starting) namelist file, with physics-suite-dependent - # modifications to the base file specified in the yaml configuration file - # FV3_NML_YAML_CONFIG_FP (for the physics suite specified by CCPP_PHYS_SUITE), - # and with additional physics-suite-independent modifications specified - # in the variable "settings" set above. 
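The hunk that follows replaces that set_namelist.py invocation with direct uwtools API calls. Condensed to its essentials, the new logic is roughly the following sketch (assuming uwtools 2.x; the variables are those already defined in generate_FV3LAM_wflow.py):

    from uwtools.api.config import get_nml_config, get_yaml_config

    # Merge the base suite namelist with the physics-suite overrides and the
    # suite-independent "settings" dict, then write the result to FV3_NML_FP.
    physics_cfg = get_yaml_config(FV3_NML_YAML_CONFIG_FP)
    base_namelist = get_nml_config(FV3_NML_BASE_SUITE_FP)
    base_namelist.update_values(physics_cfg[CCPP_PHYS_SUITE])
    base_namelist.update_values(settings)
    # Pruning empty sections and None-valued keys (done explicitly in the
    # hunk below) is how deletion requests in the update dicts are honored.
    base_namelist.dump(FV3_NML_FP)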
+ # Create a new FV3 namelist file # # ----------------------------------------------------------------------- # - args=[ "-n", FV3_NML_BASE_SUITE_FP, - "-c", FV3_NML_YAML_CONFIG_FP, CCPP_PHYS_SUITE, - "-u", settings_str, - "-o", FV3_NML_FP, - ] - if not debug: - args.append("-q") - set_namelist(args) + + physics_cfg = get_yaml_config(FV3_NML_YAML_CONFIG_FP) + base_namelist = get_nml_config(FV3_NML_BASE_SUITE_FP) + base_namelist.update_values(physics_cfg[CCPP_PHYS_SUITE]) + base_namelist.update_values(settings) + for sect, values in base_namelist.copy().items(): + if not values: + del base_namelist[sect] + continue + for k, v in values.copy().items(): + if v is None: + del base_namelist[sect][k] + base_namelist.dump(FV3_NML_FP) # # If not running the TN_MAKE_GRID task (which implies the workflow will # use pregenerated grid files), set the namelist variables specifying @@ -538,7 +538,7 @@ def generate_FV3LAM_wflow( # if not expt_config['rocoto']['tasks'].get('task_make_grid'): - set_FV3nml_sfc_climo_filenames(debug) + set_fv3nml_sfc_climo_filenames(flatten_dict(expt_config), debug) # # ----------------------------------------------------------------------- @@ -652,14 +652,13 @@ def generate_FV3LAM_wflow( #----------------------------------------------------------------------- # if any((DO_SPP, DO_SPPT, DO_SHUM, DO_SKEB, DO_LSM_SPP)): - - args=[ "-n", FV3_NML_FP, - "-u", settings_str, - "-o", FV3_NML_STOCH_FP, - ] - if not debug: - args.append("-q") - set_namelist(args) + realize( + input_config=FV3_NML_FP, + input_format="nml", + output_file=FV3_NML_STOCH_FP, + output_format="nml", + supplemental_configs=[settings], + ) # # ----------------------------------------------------------------------- diff --git a/ush/set_FV3nml_ens_stoch_seeds.py b/ush/set_fv3nml_ens_stoch_seeds.py similarity index 64% rename from ush/set_FV3nml_ens_stoch_seeds.py rename to ush/set_fv3nml_ens_stoch_seeds.py index c8a90e2797..3459fa8707 100644 --- a/ush/set_FV3nml_ens_stoch_seeds.py +++ b/ush/set_fv3nml_ens_stoch_seeds.py @@ -1,32 +1,27 @@ #!/usr/bin/env python3 +""" +Updates stochastic physics parameters in the namelist based on user configuration settings. +""" + +import argparse +import datetime as dt import os import sys -import argparse from textwrap import dedent -from datetime import datetime + +from uwtools.api.config import realize from python_utils import ( - print_input_args, - print_info_msg, - print_err_msg_exit, - date_to_str, - mkdir_vrfy, - cp_vrfy, - cd_vrfy, - str_to_type, - import_vars, - set_env_var, - define_macos_utilities, cfg_to_yaml_str, + import_vars, load_shell_config, - flatten_dict, + print_input_args, + print_info_msg, ) -from set_namelist import set_namelist - -def set_FV3nml_ens_stoch_seeds(cdate): +def set_fv3nml_ens_stoch_seeds(cdate, expt_config): """ This function, for an ensemble-enabled experiment (i.e. for an experiment for which the workflow configuration variable @@ -39,15 +34,20 @@ def set_FV3nml_ens_stoch_seeds(cdate): called as part of the TN_RUN_FCST task. 
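Before the per-argument details below, note the new calling convention: the function now receives the experiment configuration explicitly instead of importing a flattened environment. A hypothetical call, mirroring the updated unit test earlier in this patch (the dict contents are illustrative):

    from datetime import datetime
    from set_fv3nml_ens_stoch_seeds import set_fv3nml_ens_stoch_seeds

    # Run from inside the ensemble member directory that holds input.nml;
    # ENSMEM_INDX is still read from the environment inside the function.
    expt_config = {
        "workflow": {"VERBOSE": True, "FV3_NML_FN": "input.nml"},
        "global": {
            "DO_SHUM": True, "DO_SKEB": True, "DO_SPPT": True,
            "DO_SPP": True, "DO_LSM_SPP": True,
            "ISEED_SPP": [4, 5, 6, 7, 8],
        },
    }
    set_fv3nml_ens_stoch_seeds(cdate=datetime(2021, 1, 1), expt_config=expt_config)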
Args: - cdate the cycle + expt_config the in-memory dict representing the experiment configuration Returns: None """ print_input_args(locals()) - # import all environment variables - import_vars() + fv3_nml_fn = expt_config["workflow"]["FV3_NML_FN"] + verbose = expt_config["workflow"]["VERBOSE"] + + # set variables important to this function from the experiment definition + import_vars(dictionary=expt_config["global"]) + # pylint: disable=undefined-variable # # ----------------------------------------------------------------------- @@ -57,9 +57,9 @@ def set_FV3nml_ens_stoch_seeds(cdate): # # ----------------------------------------------------------------------- # - fv3_nml_ensmem_fp = f"{os.getcwd()}{os.sep}{FV3_NML_FN}" + fv3_nml_ensmem_fp = f"{os.getcwd()}{os.sep}{fv3_nml_fn}" - ensmem_num = int(ENSMEM_INDX) + ensmem_num = int(os.environ["ENSMEM_INDX"]) cdate_i = int(cdate.strftime("%Y%m%d%H")) @@ -95,49 +95,39 @@ def set_FV3nml_ens_stoch_seeds(cdate): settings["nam_sfcperts"] = {"iseed_lndp": [iseed_lsm_spp]} - settings_str = cfg_to_yaml_str(settings) - print_info_msg( dedent( f""" The variable 'settings' specifying seeds in '{fv3_nml_ensmem_fp}' has been set as follows: - settings =\n\n""" - ) - + settings_str, - verbose=VERBOSE, - ) + settings =\n\n - try: - set_namelist( - ["-q", "-n", fv3_nml_ensmem_fp, "-u", settings_str, "-o", fv3_nml_ensmem_fp] - ) - except: - print_err_msg_exit( - dedent( - f""" - Call to python script set_namelist.py to set the variables in the FV3 - namelist file that specify the paths to the surface climatology files - failed. Parameters passed to this script are: - Full path to base namelist file: - FV3_NML_FP = '{FV3_NML_FP}' - Full path to output namelist file: - fv3_nml_ensmem_fp = '{fv3_nml_ensmem_fp}' - Namelist settings specified on command line (these have highest precedence):\n - settings =\n\n""" - ) - + settings_str + {cfg_to_yaml_str(settings)}""" - ), + verbose=verbose, + ) + realize( + input_config=fv3_nml_ensmem_fp, + input_format="nml", + output_file=fv3_nml_ensmem_fp, + output_format="nml", + supplemental_configs=[settings], ) - def parse_args(argv): """Parse command line arguments""" parser = argparse.ArgumentParser( description="Creates stochastic seeds for an ensemble experiment." ) - parser.add_argument("-c", "--cdate", dest="cdate", required=True, help="Date.") + parser.add_argument( + "-c", "--cdate", + dest="cdate", + required=True, + type=lambda d: dt.datetime.strptime(d, '%Y%m%d%H'), + help="Date.", + ) parser.add_argument( "-p", @@ -153,6 +143,4 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) cfg = load_shell_config(args.path_to_defns) - cfg = flatten_dict(cfg) - import_vars(dictionary=cfg) - set_FV3nml_ens_stoch_seeds(str_to_type(args.cdate)) + set_fv3nml_ens_stoch_seeds(args.cdate, cfg) diff --git a/ush/set_FV3nml_sfc_climo_filenames.py b/ush/set_fv3nml_sfc_climo_filenames.py similarity index 53% rename from ush/set_FV3nml_sfc_climo_filenames.py rename to ush/set_fv3nml_sfc_climo_filenames.py index a1ffaa57ef..417aa0b5ee 100644 --- a/ush/set_FV3nml_sfc_climo_filenames.py +++ b/ush/set_fv3nml_sfc_climo_filenames.py @@ -1,33 +1,42 @@ #!/usr/bin/env python3 +""" +Update filenames for surface climatology files in the namelist.
+""" + +import argparse import os +import re import sys -import argparse from textwrap import dedent +from uwtools.api.config import get_yaml_config, realize + from python_utils import ( - print_input_args, - print_info_msg, - print_err_msg_exit, + cfg_to_yaml_str, check_var_valid_value, - mv_vrfy, - mkdir_vrfy, - cp_vrfy, - rm_vrfy, + flatten_dict, import_vars, - set_env_var, - load_config_file, load_shell_config, - flatten_dict, - define_macos_utilities, - find_pattern_in_str, - cfg_to_yaml_str, + print_info_msg, ) -from set_namelist import set_namelist +VERBOSE = os.environ.get("VERBOSE", "true") + +NEEDED_VARS = [ + "CRES", + "DO_ENSEMBLE", + "EXPTDIR", + "FIXlam", + "FV3_NML_FP", + "PARMdir", + "RUN_ENVIR", + ] + +# pylint: disable=undefined-variable -def set_FV3nml_sfc_climo_filenames(debug=False): +def set_fv3nml_sfc_climo_filenames(config, debug=False): """ This function sets the values of the variables in the forecast model's namelist file that specify the paths to the surface @@ -43,13 +52,9 @@ def set_FV3nml_sfc_climo_filenames(debug=False): None """ - # import all environment variables - import_vars() + import_vars(dictionary=config, env_vars=NEEDED_VARS) - # fixed file mapping variables - fixed_cfg = load_config_file(os.path.join(PARMdir, "fixed_files_mapping.yaml")) - IMPORTS = ["SFC_CLIMO_FIELDS", "FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING"] - import_vars(dictionary=flatten_dict(fixed_cfg), env_vars=IMPORTS) + fixed_cfg = get_yaml_config(os.path.join(PARMdir, "fixed_files_mapping.yaml"))["fixed_files"] # The regular expression regex_search set below will be used to extract # from the elements of the array FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING @@ -68,18 +73,16 @@ def set_FV3nml_sfc_climo_filenames(debug=False): dummy_run_dir += os.sep + "any_ensmem" namsfc_dict = {} - for mapping in FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING: - tup = find_pattern_in_str(regex_search, mapping) - nml_var_name = tup[0] - sfc_climo_field_name = tup[1] + for mapping in fixed_cfg["FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING"]: + nml_var_name, sfc_climo_field_name = re.search(regex_search, mapping).groups() - check_var_valid_value(sfc_climo_field_name, SFC_CLIMO_FIELDS) + check_var_valid_value(sfc_climo_field_name, fixed_cfg["SFC_CLIMO_FIELDS"]) - fp = os.path.join(FIXlam, f"{CRES}.{sfc_climo_field_name}.{suffix}") + file_path = os.path.join(FIXlam, f"{CRES}.{sfc_climo_field_name}.{suffix}") if RUN_ENVIR != "nco": - fp = os.path.relpath(os.path.realpath(fp), start=dummy_run_dir) + file_path = os.path.relpath(os.path.realpath(file_path), start=dummy_run_dir) - namsfc_dict[nml_var_name] = fp + namsfc_dict[nml_var_name] = file_path settings["namsfc_dict"] = namsfc_dict settings_str = cfg_to_yaml_str(settings) @@ -89,40 +92,22 @@ def set_FV3nml_sfc_climo_filenames(debug=False): f""" The variable 'settings' specifying values of the namelist variables has been set as follows:\n - settings =\n\n""" - ) - + settings_str, + settings = + + {settings_str} + """ + ), verbose=debug, ) - # Rename the FV3 namelist and call set_namelist - fv3_nml_base_fp = f"{FV3_NML_FP}.base" - mv_vrfy(f"{FV3_NML_FP} {fv3_nml_base_fp}") - - try: - set_namelist( - ["-q", "-n", fv3_nml_base_fp, "-u", settings_str, "-o", FV3_NML_FP] - ) - except: - print_err_msg_exit( - dedent( - f""" - Call to python script set_namelist.py to set the variables in the FV3 - namelist file that specify the paths to the surface climatology files - failed. 
Parameters passed to this script are: - Full path to base namelist file: - fv3_nml_base_fp = '{fv3_nml_base_fp}' - Full path to output namelist file: - FV3_NML_FP = '{FV3_NML_FP}' - Namelist settings specified on command line (these have highest precedence):\n - settings =\n\n""" - ) - + settings_str + realize( + input_config=FV3_NML_FP, + input_format="nml", + output_file=FV3_NML_FP, + output_format="nml", + supplemental_configs=[settings], ) - rm_vrfy(f"{fv3_nml_base_fp}") - - def parse_args(argv): """Parse command line arguments""" parser = argparse.ArgumentParser(description="Set surface climatology fields.") @@ -144,5 +129,4 @@ def parse_args(argv): args = parse_args(sys.argv[1:]) cfg = load_shell_config(args.path_to_defns) cfg = flatten_dict(cfg) - import_vars(dictionary=cfg) - set_FV3nml_sfc_climo_filenames(args.debug) + set_fv3nml_sfc_climo_filenames(cfg, args.debug) diff --git a/ush/set_namelist.py b/ush/set_namelist.py deleted file mode 100755 index e578d3201f..0000000000 --- a/ush/set_namelist.py +++ /dev/null @@ -1,355 +0,0 @@ -#!/usr/bin/env python3 - -""" -This utility updates a Fortran namelist file using the f90nml package. The -settings that are modified are supplied via command line YAML-formatted string -and/or YAML configuration files. - -Additionally, the tool can be used to create a YAML file from an input namelist, -or the difference between two namelists. - -The user configuration file should contain a heirarchy that follows the -heirarchy for the Fortran namelist. An example of modifying an FV3 namelist: - - Configuration file contains: - - fv_core_nml: - k_split: 4 - n_split: 5 - - gfs_physics_nml: - do_sppt: True - -The output namelist will differ from the input namelist by only these three -settings. If one of these sections and/or variables did not previously exist, it -will be automatically created. It is up to the user to ensure that configuration -settings are provided under the correct sections and variable names. - -The optional base configuration file (provided via the -c command line argument) -contains the known set of configurations used and supported by the community, if -using the one provided in parm/FV3.input.yml. If maintaining this file -for a different set of configurations, ensure that the heirarchy is such that it -names the configuration at the top level (section), and the subsequent sections -match those in the F90 namelist that will be updated. - -Examples - - To show help options: - - set_namelist.py -h - - To produce a namelist (fv3_expt.nml) by specifying a physics package: - - set_namelist.py -n ../parm/input.nml.FV3 -c ../parm/FV3.input.yml FV3_HRRR - -o fv3_expt.nml - - To produce a YAML file (fv3_namelist.yml) from a user namelist: - - set_namelist.py -i my_namelist.nml -o fv3_namelist.nml -t yaml - - To produce a YAML file (fv3_my_namelist.yml) with differences from base nml: - - set_namelist.py -n ../parm/input.nml.FV3 -i my_namelist.nml -t yaml - -o fv3_my_namelist.nml - -Expected behavior: - - - A Fortran namelist that contains only user-defined settings will be - generated if no input namelist is provided. - - An unmodified copy of an input namelist will be generated in the - designated output location if no user-settings are provided. - - Command-line-entered settings over-ride settings in YAML configuration - file. - - Given a user namelist, the script can dump a YAML file. 
- - Given a user namelist and a base namelist, the script can dump the - difference in the two to a YAML file that can be included as a section - in the supported configs. -""" - -import argparse -import collections -import os -import sys - -import f90nml -import yaml - - -def config_exists(arg): - - """ - Checks whether the config file exists and if it contains the input - section. Returns the arg as provided if checks are passed. - """ - - # Agument is expected to be a 2-item list of file name and internal section - # name. - file_name = arg[0] - section_name = arg[1] - - file_exists(file_name) - - # Load the YAML file into a dictionary - with open(file_name, "r") as fn: - cfg = yaml.load(fn, Loader=yaml.Loader) - - # Grab only the section that is specified by the user - try: - cfg = cfg[section_name] - except KeyError: - msg = f"Section {section_name} does not exist in top level of {file_name}" - raise argparse.ArgumentTypeError(msg) - - return [cfg, section_name] - - -def file_exists(arg): - - """Check for existence of file""" - - if not os.path.exists(arg): - msg = f"{arg} does not exist!" - raise argparse.ArgumentTypeError(msg) - - return arg - - -def load_config(arg): - - """ - Check to ensure that the provided config file exists. If it does, load it - with YAML's safe loader and return the resulting dict. - """ - - return yaml.safe_load(arg) - - -def path_ok(arg): - - """ - Check whether the path to the file exists, and is writeable. Return the path - if it passes all checks, otherwise raise an error. - """ - - # Get the absolute path provided by arg - dir_name = os.path.abspath(os.path.dirname(arg)) - - # Ensure the arg path exists, and is writable. Raise error if not. - if os.path.lexists(dir_name) and os.access(dir_name, os.W_OK): - return arg - - msg = f"{arg} is not a writable path!" - raise argparse.ArgumentTypeError(msg) - - -def parse_args(argv): - - """ - Function maintains the arguments accepted by this script. Please see - Python's argparse documenation for more information about settings of each - argument. - """ - - parser = argparse.ArgumentParser( - description="Update a Fortran namelist with user-defined settings." - ) - - # Required - parser.add_argument( - "-o", - "--outfile", - help="Required: Full path to output file. This is a \ - namelist by default.", - required=True, - type=path_ok, - ) - - # Optional - parser.add_argument( - "-c", - "--config", - help="Full path to a YAML config file containing multiple \ - configurations, and the top-level section to use. Optional.", - metavar=("[FILE,", "SECTION]"), - nargs=2, - ) - parser.add_argument( - "-i", - "--input_nml", - help="Path to a user namelist. Use with -n and \ - -t yaml to get a YAML file to use with workflow.", - type=file_exists, - ) - parser.add_argument( - "-n", - "--basenml", - dest="nml", - help="Full path to the input Fortran namelist. Optional.", - type=file_exists, - ) - parser.add_argument( - "-t", - "--type", - choices=["nml", "yaml"], - default="nml", - help="Output file type.", - ) - parser.add_argument( - "-u", - "--user_config", - help="Command-line user config options in YAML-formatted \ - string. These options will override any provided in an \ - input file. 
Optional.", - metavar="YAML STRING", - type=load_config, - ) - - # Flags - parser.add_argument( - "-q", - "--quiet", - action="store_true", - help="If provided, suppress all output.", - ) - return parser.parse_args(argv) - - -def dict_diff(dict1, dict2): - - """ - Produces a dictionary of how dict2 differs from dict1 - """ - - diffs = {} - - # Loop through dict1 sections and key/value pairs - for sect, items in dict1.items(): - for key, val in items.items(): - - # If dict 2 has a different value, record the dict2 value - if val != dict2.get(sect, {}).get(key, ""): - if not diffs.get(sect): - diffs[sect] = {} - diffs[sect][key] = dict2.get(sect, {}).get(key) - - # Loop through dict2 sections and key/value pairs to catch any settings that - # may be present in the 2nd dict that weren't in the first. - for sect, items in dict2.items(): - for key, val in items.items(): - - # If dict1 has a diffent value than dict2, record the dict2 value - if val != dict1.get(sect, {}).get(key, ""): - - # Check to make sure it hasn't already been recorded - if diffs.get(sect, {}).get(key, "DNE") == "DNE": - if not diffs.get(sect): - diffs[sect] = {} - diffs[sect][key] = val - return diffs - - -def to_dict(odict): - - """Recursively convert OrderedDict to Python dict.""" - - if not isinstance(odict, collections.OrderedDict): - return odict - - ret = dict(odict) - for key, value in ret.items(): - if isinstance(value, collections.OrderedDict): - ret[key] = to_dict(value) - return ret - - -def update_dict(dest, newdict, quiet=False): - - """ - Overwrites all values in dest dictionary with values from newdict. Turn off - print statements with queit=True. - - Input: - - dest A dict that is to be updated. - newdict A dict containing sections and keys corresponding to - those in dest and potentially additional ones, that will be used to - update the dest dict. - quiet An optional boolean flag to turn off output. - - Output: - - None - - Result: - - The dest dict is updated in place. - """ - - for sect, values in newdict: - # If section is set to None, remove all contents from namelist - if values is None: - dest[sect] = {} - else: - for key, value in values.items(): - if not quiet: - print(f"Setting {sect}.{key} = {value}") - - # Remove key from dict if config is set to None - if value is None: - _ = dest[sect].pop(key, None) - else: - - try: - dest[sect][key] = value - except KeyError: - # Namelist section did not exist. Create it and update the value. 
- dest[sect] = {} - dest[sect][key] = value - - -def set_namelist(argv): - - """Using input command line arguments (cla), update a Fortran namelist file.""" - - # parse argumetns - cla = parse_args(argv) - if cla.config: - cla.config, _ = config_exists(cla.config) - - # Load base namelist into dict - nml = f90nml.Namelist() - if cla.nml is not None: - nml = f90nml.read(cla.nml) - - # Update namelist settings (nml) with config file settings (cfg) - cfg = {} - if cla.config is not None: - cfg = cla.config - update_dict(nml, cfg.items(), quiet=cla.quiet) - - # Update nml, overriding YAML if needed, with any command-line entries - if cla.user_config: - update_dict(nml, cla.user_config.items(), quiet=cla.quiet) - - # Write the resulting file - with open(cla.outfile, "w") as fn: - if cla.type == "nml": - nml.write(fn, sort=True) - - if cla.type == "yaml": - if cla.input_nml: - input_nml = f90nml.read(cla.input_nml) - - # Determine how input_nml differs from the configured namelist - diff = dict_diff(nml, input_nml) - - # Write diffs to YAML file - yaml.dump(diff, fn) - - else: - # Write the namelist to YAML file - yaml.dump(to_dict(nml.todict()), fn) - - -if __name__ == "__main__": - set_namelist(sys.argv[1:]) diff --git a/ush/update_input_nml.py b/ush/update_input_nml.py index 0f10c675b2..e975d9bc08 100644 --- a/ush/update_input_nml.py +++ b/ush/update_input_nml.py @@ -1,62 +1,41 @@ #!/usr/bin/env python3 +""" +Update the model namelist for a variety of different settings. +""" + +import argparse import os import sys -import argparse -import logging from textwrap import dedent +from uwtools.api.config import realize + from python_utils import ( - import_vars, print_input_args, print_info_msg, - print_err_msg_exit, cfg_to_yaml_str, - load_shell_config, - flatten_dict, ) -from set_namelist import set_namelist +VERBOSE = os.environ.get("VERBOSE", "true") - -def update_input_nml(run_dir): +def update_input_nml(namelist, restart, aqm_na_13km): """Update the FV3 input.nml file in the specified run directory Args: - run_dir: run directory + namelist: path to the namelist + restart: should forecast start from restart? + aqm_na_13km: should the 13km AQM config be used? + Returns: Boolean """ print_input_args(locals()) - - # import all environment variables - import_vars() - - # - # ----------------------------------------------------------------------- - # - # Update the FV3 input.nml file in the specified run directory. - # - # ----------------------------------------------------------------------- - # - print_info_msg( - f""" - Updating the FV3 input.nml file in the specified run directory (run_dir): - run_dir = '{run_dir}'""", - verbose=VERBOSE, - ) - # - # ----------------------------------------------------------------------- - # - # Set new values of the specific parameters to be updated. 
- # - # ----------------------------------------------------------------------- - # settings = {} # For restart run - if args.restart: + if restart: settings["fv_core_nml"] = { "external_ic": False, "make_nh": False, @@ -69,105 +48,68 @@ def update_input_nml(run_dir): settings["gfs_physics_nml"] = { "nstf_name": [2, 0, 0, 0, 0], } - + # For AQM_NA_13km domain for air quality modeling - if args.aqm_na_13km: + if aqm_na_13km: settings["fv_core_nml"] = { "k_split": 1, "n_split": 8, } - settings_str = cfg_to_yaml_str(settings) - print_info_msg( dedent( f""" - The variable 'settings' specifying values to be used in the FV3 'input.nml' - file for restart has been set as follows:\n - settings =\n\n""" - ) - + settings_str, + Updating {namelist} + + The updated values are: + + {cfg_to_yaml_str(settings)} + + """ + ), verbose=VERBOSE, ) - # - # ----------------------------------------------------------------------- - # - # Call a python script to update the experiment's actual FV3 INPUT.NML - # file for restart. - # - # ----------------------------------------------------------------------- - # - fv3_input_nml_fp = os.path.join(run_dir, FV3_NML_FN) - - try: - set_namelist( - [ - "-q", - "-n", - fv3_input_nml_fp, - "-u", - settings_str, - "-o", - fv3_input_nml_fp, - ] - ) - except: - logging.exception( - dedent( - f""" - Call to python script set_namelist.py to generate an FV3 namelist file - failed. Parameters passed to this script are: - Full path to base namelist file: - fv3_input_nml_fp = '{fv3_input_nml_fp}' - Full path to output namelist file: - fv3_input_nml_fp = '{fv3_input_nml_fp}' - Namelist settings specified on command line:\n - settings =\n\n""" - ) - + settings_str - ) - return False - - return True + # Update the experiment's FV3 INPUT.NML file + realize( + input_config=namelist, + input_format="nml", + output_file=namelist, + output_format="nml", + supplemental_configs=[settings], + ) def parse_args(argv): """Parse command line arguments""" parser = argparse.ArgumentParser(description="Update FV3 input.nml file for restart.") parser.add_argument( - "-r", "--run_dir", - dest="run_dir", + "-n", "--namelist", + dest="namelist", required=True, - help="Run directory." - ) - - parser.add_argument( - "-p", "--path-to-defns", - dest="path_to_defns", - required=True, - help="Path to var_defns file.", + help="Path to namelist to update.", ) parser.add_argument( "--restart", action='store_true', - help='Update for restart') + help='Update for restart', + ) parser.add_argument( "--aqm_na_13km", action='store_true', - help='Update for AQM_NA_13km in air quality modeling') + help='Update for AQM_NA_13km in air quality modeling', + ) return parser.parse_args(argv) if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) - cfg = flatten_dict(cfg) - import_vars(dictionary=cfg) update_input_nml( - run_dir=args.run_dir, + namelist=args.namelist, + restart=args.restart, + aqm_na_13km=args.aqm_na_13km, ) From 02cf222980951228168fbce4270d924115bafb9b Mon Sep 17 00:00:00 2001 From: "Chan-Hoo.Jeon-NOAA" <60152248+chan-hoo@users.noreply.github.com> Date: Wed, 27 Mar 2024 09:44:48 -0400 Subject: [PATCH 11/42] [develop] Update AQM task scripts with those of production/aqm_dev branch (#1060) * Update the AQM task scripts with those of the production or aqm_dev branch. * Set the nco environment variable in the J-job scripts directly (AQM tasks only). * Change the vertical directory structure of the AQM task scripts to meet the NCO implementation standards. 
* Remove the nco tests from the we2e tests and nco sample scripts. * Change the file names of J-job and ex-scripts (AQM tasks only). --- .cicd/scripts/srw_ftest.sh | 1 - .cicd/scripts/srw_test.sh | 4 +- .gitignore | 5 + Externals.cfg | 4 +- aqm_environment.yml | 2 +- devbuild.sh | 34 +++ jobs/JREGIONAL_BIAS_CORRECTION_O3 | 104 --------- jobs/JREGIONAL_BIAS_CORRECTION_PM25 | 104 --------- jobs/JREGIONAL_MAKE_ICS | 4 +- jobs/JREGIONAL_MAKE_LBCS | 4 +- jobs/JREGIONAL_NEXUS_POST_SPLIT | 103 --------- jobs/JREGIONAL_POINT_SOURCE | 103 --------- jobs/JREGIONAL_POST_STAT_O3 | 101 -------- jobs/JREGIONAL_POST_STAT_PM25 | 101 -------- jobs/JREGIONAL_PRE_POST_STAT | 108 --------- jobs/JREGIONAL_RUN_POST | 2 +- jobs/{JREGIONAL_AQM_ICS => JSRW_AQM_ICS} | 123 +++++++--- jobs/{JREGIONAL_AQM_LBCS => JSRW_AQM_LBCS} | 125 +++++++--- jobs/JSRW_BIAS_CORRECTION_O3 | 161 +++++++++++++ jobs/JSRW_BIAS_CORRECTION_PM25 | 161 +++++++++++++ ...IONAL_FIRE_EMISSION => JSRW_FIRE_EMISSION} | 135 +++++++---- ...NAL_NEXUS_EMISSION => JSRW_NEXUS_EMISSION} | 114 ++++++--- ...IONAL_NEXUS_GFS_SFC => JSRW_NEXUS_GFS_SFC} | 155 ++++++++++--- jobs/JSRW_NEXUS_POST_SPLIT | 163 +++++++++++++ jobs/JSRW_POINT_SOURCE | 159 +++++++++++++ jobs/JSRW_POST_STAT_O3 | 160 +++++++++++++ jobs/JSRW_POST_STAT_PM25 | 158 +++++++++++++ jobs/JSRW_PRE_POST_STAT | 173 ++++++++++++++ modulefiles/build_hera_intel.lua | 1 + ...python_srw_cmaq.lua => python_srw_aqm.lua} | 0 modulefiles/tasks/cheyenne/aqm_ics.local.lua | 2 +- modulefiles/tasks/cheyenne/aqm_lbcs.local.lua | 2 +- .../tasks/cheyenne/fire_emission.local.lua | 2 +- .../tasks/cheyenne/nexus_emission.local.lua | 2 +- .../tasks/cheyenne/nexus_gfs_sfc.local.lua | 2 +- .../tasks/cheyenne/nexus_post_split.local.lua | 2 +- .../tasks/cheyenne/point_source.local.lua | 2 +- .../tasks/cheyenne/pre_post_stat.local.lua | 2 +- modulefiles/tasks/derecho/aqm_ics.local.lua | 2 +- modulefiles/tasks/derecho/aqm_lbcs.local.lua | 2 +- .../tasks/derecho/fire_emission.local.lua | 2 +- .../tasks/derecho/nexus_emission.local.lua | 2 +- .../tasks/derecho/nexus_gfs_sfc.local.lua | 2 +- .../tasks/derecho/nexus_post_split.local.lua | 2 +- .../tasks/derecho/point_source.local.lua | 2 +- .../tasks/derecho/pre_post_stat.local.lua | 2 +- modulefiles/tasks/hera/aqm_ics.local.lua | 4 +- modulefiles/tasks/hera/aqm_lbcs.local.lua | 4 +- .../tasks/hera/fire_emission.local.lua | 4 +- .../tasks/hera/nexus_emission.local.lua | 4 +- .../tasks/hera/nexus_post_split.local.lua | 4 +- modulefiles/tasks/hera/point_source.local.lua | 2 +- .../tasks/hera/pre_post_stat.local.lua | 2 +- modulefiles/tasks/hercules/aqm_ics.local.lua | 2 +- .../tasks/hercules/fire_emission.local.lua | 2 +- .../tasks/hercules/nexus_emission.local.lua | 2 +- .../tasks/hercules/nexus_post_split.local.lua | 2 +- .../tasks/hercules/point_source.local.lua | 2 +- modulefiles/tasks/orion/aqm_ics.local.lua | 2 +- .../tasks/orion/fire_emission.local.lua | 2 +- .../tasks/orion/nexus_emission.local.lua | 2 +- .../tasks/orion/nexus_post_split.local.lua | 2 +- .../tasks/orion/point_source.local.lua | 2 +- parm/aqm.rc | 16 +- parm/wflow/aqm_post.yaml | 14 +- parm/wflow/aqm_prep.yaml | 31 ++- parm/wflow/default_workflow.yaml | 21 +- scripts/exregional_fire_emission.sh | 198 ---------------- scripts/exregional_make_ics.sh | 26 ++- scripts/exregional_make_lbcs.sh | 6 +- scripts/exregional_run_fcst.sh | 69 +++--- ...exregional_aqm_ics.sh => exsrw_aqm_ics.sh} | 79 +++---- ...regional_aqm_lbcs.sh => exsrw_aqm_lbcs.sh} | 128 +++++------ ...tion_o3.sh => exsrw_bias_correction_o3.sh} | 
216 +++++++----------- ..._pm25.sh => exsrw_bias_correction_pm25.sh} | 177 ++++++-------- scripts/exsrw_fire_emission.sh | 167 ++++++++++++++ ...us_emission.sh => exsrw_nexus_emission.sh} | 183 +++++++-------- ...exus_gfs_sfc.sh => exsrw_nexus_gfs_sfc.sh} | 84 +++---- ...ost_split.sh => exsrw_nexus_post_split.sh} | 47 ++-- ..._point_source.sh => exsrw_point_source.sh} | 26 +-- ..._post_stat_o3.sh => exsrw_post_stat_o3.sh} | 62 ++--- ...t_stat_pm25.sh => exsrw_post_stat_pm25.sh} | 89 ++++---- ...re_post_stat.sh => exsrw_pre_post_stat.sh} | 27 ++- tests/WE2E/machine_suites/comprehensive | 6 - .../machine_suites/comprehensive.cheyenne | 6 - .../WE2E/machine_suites/comprehensive.derecho | 6 - .../machine_suites/comprehensive.noaacloud | 6 - tests/WE2E/machine_suites/comprehensive.orion | 6 - tests/WE2E/machine_suites/coverage.cheyenne | 1 - tests/WE2E/machine_suites/coverage.derecho | 1 - tests/WE2E/machine_suites/coverage.gaea | 2 - .../WE2E/machine_suites/coverage.hera.gnu.com | 1 - tests/WE2E/machine_suites/coverage.jet | 1 - tests/WE2E/machine_suites/coverage.orion | 1 - tests/WE2E/machine_suites/fundamental | 2 - tests/WE2E/run_WE2E_tests.py | 17 -- .../default_configs/config.nco.yaml | 1 - ..._ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml | 26 --- ..._lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml | 26 --- ...FS_suite_GFS_v15_thompson_mynn_lam3km.yaml | 28 --- ...act_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml | 30 --- .../wflow_features/config.nco_ensemble.yaml | 34 --- .../config.nco_inline_post.yaml | 1 - .../test_python/test_generate_FV3LAM_wflow.py | 29 +-- ush/config.aqm.community.yaml | 2 +- ush/config.aqm.nco.realtime.yaml | 99 -------- ush/config.nco.yaml | 41 ---- ush/config_defaults.yaml | 126 +++------- ush/create_aqm_rc_file.py | 11 +- ush/job_preamble.sh | 53 ++--- ush/machine/hera.yaml | 19 +- ush/machine/wcoss2.yaml | 15 -- ush/setup.py | 36 +-- 113 files changed, 2523 insertions(+), 2424 deletions(-) delete mode 100755 jobs/JREGIONAL_BIAS_CORRECTION_O3 delete mode 100755 jobs/JREGIONAL_BIAS_CORRECTION_PM25 delete mode 100755 jobs/JREGIONAL_NEXUS_POST_SPLIT delete mode 100755 jobs/JREGIONAL_POINT_SOURCE delete mode 100755 jobs/JREGIONAL_POST_STAT_O3 delete mode 100755 jobs/JREGIONAL_POST_STAT_PM25 delete mode 100755 jobs/JREGIONAL_PRE_POST_STAT rename jobs/{JREGIONAL_AQM_ICS => JSRW_AQM_ICS} (50%) rename jobs/{JREGIONAL_AQM_LBCS => JSRW_AQM_LBCS} (50%) create mode 100755 jobs/JSRW_BIAS_CORRECTION_O3 create mode 100755 jobs/JSRW_BIAS_CORRECTION_PM25 rename jobs/{JREGIONAL_FIRE_EMISSION => JSRW_FIRE_EMISSION} (57%) rename jobs/{JREGIONAL_NEXUS_EMISSION => JSRW_NEXUS_EMISSION} (50%) rename jobs/{JREGIONAL_NEXUS_GFS_SFC => JSRW_NEXUS_GFS_SFC} (51%) create mode 100755 jobs/JSRW_NEXUS_POST_SPLIT create mode 100755 jobs/JSRW_POINT_SOURCE create mode 100755 jobs/JSRW_POST_STAT_O3 create mode 100755 jobs/JSRW_POST_STAT_PM25 create mode 100755 jobs/JSRW_PRE_POST_STAT rename modulefiles/{python_srw_cmaq.lua => python_srw_aqm.lua} (100%) delete mode 100755 scripts/exregional_fire_emission.sh rename scripts/{exregional_aqm_ics.sh => exsrw_aqm_ics.sh} (68%) rename scripts/{exregional_aqm_lbcs.sh => exsrw_aqm_lbcs.sh} (67%) rename scripts/{exregional_bias_correction_o3.sh => exsrw_bias_correction_o3.sh} (68%) rename scripts/{exregional_bias_correction_pm25.sh => exsrw_bias_correction_pm25.sh} (68%) create mode 100755 scripts/exsrw_fire_emission.sh rename scripts/{exregional_nexus_emission.sh => exsrw_nexus_emission.sh} (63%) rename scripts/{exregional_nexus_gfs_sfc.sh => exsrw_nexus_gfs_sfc.sh} (68%) 
rename scripts/{exregional_nexus_post_split.sh => exsrw_nexus_post_split.sh} (73%) rename scripts/{exregional_point_source.sh => exsrw_point_source.sh} (83%) rename scripts/{exregional_post_stat_o3.sh => exsrw_post_stat_o3.sh} (81%) rename scripts/{exregional_post_stat_pm25.sh => exsrw_post_stat_pm25.sh} (79%) rename scripts/{exregional_pre_post_stat.sh => exsrw_pre_post_stat.sh} (81%) delete mode 120000 tests/WE2E/test_configs/default_configs/config.nco.yaml delete mode 100644 tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml delete mode 100644 tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml delete mode 100644 tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml delete mode 100644 tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml delete mode 100644 tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml delete mode 120000 tests/WE2E/test_configs/wflow_features/config.nco_inline_post.yaml delete mode 100644 ush/config.aqm.nco.realtime.yaml delete mode 100644 ush/config.nco.yaml diff --git a/.cicd/scripts/srw_ftest.sh b/.cicd/scripts/srw_ftest.sh index 5479e8b46d..d98d20c831 100755 --- a/.cicd/scripts/srw_ftest.sh +++ b/.cicd/scripts/srw_ftest.sh @@ -46,7 +46,6 @@ fi # Test directories we2e_experiment_base_dir="${workspace}/expt_dirs" we2e_test_dir="${workspace}/tests/WE2E" -nco_dir="${workspace}/nco_dirs" pwd diff --git a/.cicd/scripts/srw_test.sh b/.cicd/scripts/srw_test.sh index 1bffe083bd..76ddf020df 100755 --- a/.cicd/scripts/srw_test.sh +++ b/.cicd/scripts/srw_test.sh @@ -28,7 +28,6 @@ fi # Test directories we2e_experiment_base_dir="${workspace}/expt_dirs" we2e_test_dir="${workspace}/tests/WE2E" -nco_dir="${workspace}/nco_dirs" # Run the end-to-end tests. if "${SRW_WE2E_COMPREHENSIVE_TESTS}"; then @@ -41,8 +40,7 @@ cd ${we2e_test_dir} # Progress file progress_file="${workspace}/we2e_test_results-${platform}-${SRW_COMPILER}.txt" ./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \ - --expt_basedir=${we2e_experiment_base_dir} \ - --opsroot=${nco_dir} | tee ${progress_file} + --expt_basedir=${we2e_experiment_base_dir} | tee ${progress_file} # Set exit code to number of failures set +e diff --git a/.gitignore b/.gitignore index ad778d0bc1..2b362272f6 100644 --- a/.gitignore +++ b/.gitignore @@ -5,6 +5,9 @@ build/ fix/ include/ lib/ +parm/aqm_utils_parm/ +parm/nexus_config/ +parm/ufs_utils_parm/ share/ sorc/*/ tests/WE2E/WE2E_tests_*.yaml @@ -12,6 +15,8 @@ tests/WE2E/*.txt tests/WE2E/*.log tests/WE2E/log.* ush/__pycache__/ +ush/aqm_utils_python/ +ush/nexus_utils/ ush/config.yaml ush/python_utils/__pycache__/ ush/python_utils/workflow-tools/ diff --git a/Externals.cfg b/Externals.cfg index 4bae74b316..4b54c71d72 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -30,7 +30,7 @@ protocol = git repo_url = https://github.com/noaa-oar-arl/NEXUS # Specify either a branch name or a hash but not both. #branch = develop -hash = 6a7a994 +hash = 40346b6 local_path = sorc/arl_nexus required = True @@ -39,7 +39,7 @@ protocol = git repo_url = https://github.com/NOAA-EMC/AQM-utils # Specify either a branch name or a hash but not both. 
#branch = develop -hash = 694a139 +hash = d953bd1 local_path = sorc/AQM-utils required = True diff --git a/aqm_environment.yml b/aqm_environment.yml index 03d72f6706..afd8a7b634 100644 --- a/aqm_environment.yml +++ b/aqm_environment.yml @@ -9,5 +9,5 @@ dependencies: - pylint=2.17* - pytest=7.2* - scipy=1.10.* - - uwtools=1.0.0 + - uwtools=2.1* - xarray=2022.11.* diff --git a/devbuild.sh b/devbuild.sh index 05cc76312c..014fbdb3b7 100755 --- a/devbuild.sh +++ b/devbuild.sh @@ -502,4 +502,38 @@ else fi fi +# Copy config/python directories from component to main directory (EE2 compliance) +if [ "${BUILD_UFS_UTILS}" = "on" ]; then + if [ -d "${SRW_DIR}/parm/ufs_utils_parm" ]; then + rm -rf ${SRW_DIR}/parm/ufs_utils_parm + fi + cp -rp ${SRW_DIR}/sorc/UFS_UTILS/parm ${SRW_DIR}/parm/ufs_utils_parm +fi +if [ "${BUILD_UPP}" = "on" ]; then + if [ -d "${SRW_DIR}/parm/upp_parm" ]; then + rm -rf ${SRW_DIR}/parm/upp_parm + fi + cp -rp ${SRW_DIR}/sorc/UPP/parm ${SRW_DIR}/parm/upp_parm +fi +if [ "${BUILD_NEXUS}" = "on" ]; then + if [ -d "${SRW_DIR}/parm/nexus_config" ]; then + rm -rf ${SRW_DIR}/parm/nexus_config + fi + cp -rp ${SRW_DIR}/sorc/arl_nexus/config ${SRW_DIR}/parm/nexus_config + if [ -d "${SRW_DIR}/ush/nexus_utils" ]; then + rm -rf ${SRW_DIR}/ush/nexus_utils + fi + cp -rp ${SRW_DIR}/sorc/arl_nexus/utils ${SRW_DIR}/ush/nexus_utils +fi +if [ "${BUILD_AQM_UTILS}" = "on" ]; then + if [ -d "${SRW_DIR}/parm/aqm_utils_parm" ]; then + rm -rf ${SRW_DIR}/parm/aqm_utils_parm + fi + cp -rp ${SRW_DIR}/sorc/AQM-utils/parm ${SRW_DIR}/parm/aqm_utils_parm + if [ -d "${SRW_DIR}/ush/aqm_utils_python" ]; then + rm -rf ${SRW_DIR}/ush/aqm_utils_python + fi + cp -rp ${SRW_DIR}/sorc/AQM-utils/python_utils ${SRW_DIR}/ush/aqm_utils_python +fi + exit 0 diff --git a/jobs/JREGIONAL_BIAS_CORRECTION_O3 b/jobs/JREGIONAL_BIAS_CORRECTION_O3 deleted file mode 100755 index ddcef59494..0000000000 --- a/jobs/JREGIONAL_BIAS_CORRECTION_O3 +++ /dev/null @@ -1,104 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# This script runs BIAS-CORRECTION-O3. -# -#----------------------------------------------------------------------- -# - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. 
-# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the J-job script for the task that runs BIAS-CORRECTION-O3. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set the run directory. -# -#----------------------------------------------------------------------- -# -if [ "${RUN_ENVIR}" = "community" ]; then - DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_BIAS_CORRECTION_O3}" - check_for_preexist_dir_file "$DATA" "delete" - mkdir_vrfy -p $DATA - cd_vrfy $DATA -fi - -mkdir_vrfy -p ${COMOUTwmo} - -export PARMaqm_utils="${PARMaqm_utils:-${HOMEdir}/sorc/AQM-utils/parm}" - -TMP_STDAY=`${NDATE} -8760 ${PDY}${cyc} | cut -c1-8` # 1 year back -export BC_STDAY=${BC_STDAY:-${TMP_STDAY}} -# -#----------------------------------------------------------------------- -# -# Call the ex-script for this J-job and pass to it the necessary varia- -# bles. -# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exregional_bias_correction_o3.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - diff --git a/jobs/JREGIONAL_BIAS_CORRECTION_PM25 b/jobs/JREGIONAL_BIAS_CORRECTION_PM25 deleted file mode 100755 index 7e08b02a12..0000000000 --- a/jobs/JREGIONAL_BIAS_CORRECTION_PM25 +++ /dev/null @@ -1,104 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# This script runs BIAS-CORRECTION-PM25. -# -#----------------------------------------------------------------------- -# - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). 
-# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the J-job script for the task that runs BIAS-CORRECTION-PM25. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set the run directory. -# -#----------------------------------------------------------------------- -# -if [ "${RUN_ENVIR}" = "community" ]; then - DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_BIAS_CORRECTION_PM25}" - check_for_preexist_dir_file "$DATA" "delete" - mkdir_vrfy -p $DATA - cd_vrfy $DATA -fi - -mkdir_vrfy -p ${COMOUTwmo} - -export PARMaqm_utils="${PARMaqm_utils:-${HOMEdir}/sorc/AQM-utils/parm}" - -TMP_STDAY=`${NDATE} -8760 ${PDY}${cyc} | cut -c1-8` # 1 year back -export BC_STDAY=${BC_STDAY:-${TMP_STDAY}} -# -#----------------------------------------------------------------------- -# -# Call the ex-script for this J-job and pass to it the necessary varia- -# bles. -# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exregional_bias_correction_pm25.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - diff --git a/jobs/JREGIONAL_MAKE_ICS b/jobs/JREGIONAL_MAKE_ICS index 1e38f4058d..70306c0a87 100755 --- a/jobs/JREGIONAL_MAKE_ICS +++ b/jobs/JREGIONAL_MAKE_ICS @@ -56,9 +56,9 @@ for the FV3 (in NetCDF format). #----------------------------------------------------------------------- # if [ $RUN_ENVIR = "nco" ]; then - export INPUT_DATA="${COMIN}" + export INPUT_DATA="${COMIN}" else - export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT" + export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT" fi mkdir_vrfy -p "${INPUT_DATA}" # diff --git a/jobs/JREGIONAL_MAKE_LBCS b/jobs/JREGIONAL_MAKE_LBCS index 4c524e26a6..16ac382fee 100755 --- a/jobs/JREGIONAL_MAKE_LBCS +++ b/jobs/JREGIONAL_MAKE_LBCS @@ -56,9 +56,9 @@ hour zero). 
#----------------------------------------------------------------------- # if [ $RUN_ENVIR = "nco" ]; then - export INPUT_DATA="${COMIN}" + export INPUT_DATA="${COMIN}" else - export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT" + export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT" fi mkdir_vrfy -p "${INPUT_DATA}" # diff --git a/jobs/JREGIONAL_NEXUS_POST_SPLIT b/jobs/JREGIONAL_NEXUS_POST_SPLIT deleted file mode 100755 index 7cb8a55bf0..0000000000 --- a/jobs/JREGIONAL_NEXUS_POST_SPLIT +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the J-job script for the task that generates the emission files -using NEXUS which will output for FV3 (in NetCDF format). -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set the name of and create the directory in which the output from this -# script will be placed (if it doesn't already exist). -# -#----------------------------------------------------------------------- -# -if [ $RUN_ENVIR = "nco" ]; then - export INPUT_DATA="${COMIN}" -else - export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT" -fi -mkdir_vrfy -p "${INPUT_DATA}" -# -#----------------------------------------------------------------------- -# -# Set the run directory -# -#----------------------------------------------------------------------- -# -if [ "${RUN_ENVIR}" = "community" ]; then - DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_NEXUS_POST_SPLIT}" - check_for_preexist_dir_file "$DATA" "delete" - mkdir_vrfy -p $DATA - cd_vrfy $DATA -fi -# -#----------------------------------------------------------------------- -# -# Call the ex-script for this J-job. -# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exregional_nexus_post_split.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." 
-# -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - diff --git a/jobs/JREGIONAL_POINT_SOURCE b/jobs/JREGIONAL_POINT_SOURCE deleted file mode 100755 index 57000dd599..0000000000 --- a/jobs/JREGIONAL_POINT_SOURCE +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the J-job script for the task that generates the point source files. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set the name of and create the directory in which the output from this -# script will be placed (if it doesn't already exist). -# -#----------------------------------------------------------------------- -# -if [ $RUN_ENVIR = "nco" ]; then - export INPUT_DATA="${COMIN}" -else - export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT" -fi -mkdir_vrfy -p "${INPUT_DATA}" -# -#----------------------------------------------------------------------- -# -# Set the run directory -# -#----------------------------------------------------------------------- -# -if [ "${RUN_ENVIR}" = "community" ]; then - DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POINT_SOURCE}" - check_for_preexist_dir_file "$DATA" "delete" - mkdir_vrfy -p $DATA - cd_vrfy $DATA -fi -# -# -#----------------------------------------------------------------------- -# -# Call the ex-script for this J-job. 
-# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exregional_point_source.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - diff --git a/jobs/JREGIONAL_POST_STAT_O3 b/jobs/JREGIONAL_POST_STAT_O3 deleted file mode 100755 index a522d00dbb..0000000000 --- a/jobs/JREGIONAL_POST_STAT_O3 +++ /dev/null @@ -1,101 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# This script runs POST-STAT-O3. -# -#----------------------------------------------------------------------- -# - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the J-job script for the task that runs POST-STAT-O3. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set the run directory. -# -#----------------------------------------------------------------------- -# -if [ "${RUN_ENVIR}" = "community" ]; then - DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POST_STAT_O3}" - check_for_preexist_dir_file "$DATA" "delete" - mkdir_vrfy -p $DATA - cd_vrfy $DATA -fi - -mkdir_vrfy -p ${COMOUTwmo} - -export PARMaqm_utils="${PARMaqm_utils:-${HOMEdir}/sorc/AQM-utils/parm}" -# -#----------------------------------------------------------------------- -# -# Call the ex-script for this J-job and pass to it the necessary varia- -# bles. 
-# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exregional_post_stat_o3.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - diff --git a/jobs/JREGIONAL_POST_STAT_PM25 b/jobs/JREGIONAL_POST_STAT_PM25 deleted file mode 100755 index cd86879a73..0000000000 --- a/jobs/JREGIONAL_POST_STAT_PM25 +++ /dev/null @@ -1,101 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# This script runs POST-STAT-PM25. -# -#----------------------------------------------------------------------- -# - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the J-job script for the task that runs POST-UPP-STAT. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set the run directory. -# -#----------------------------------------------------------------------- -# -DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POST_STAT_PM25}" -if [ "${RUN_ENVIR}" = "community" ]; then - check_for_preexist_dir_file "$DATA" "delete" - mkdir_vrfy -p $DATA - cd_vrfy $DATA -fi - -mkdir_vrfy -p ${COMOUTwmo} - -export PARMaqm_utils="${PARMaqm_utils:-${HOMEdir}/sorc/AQM-utils/parm}" -# -#----------------------------------------------------------------------- -# -# Call the ex-script for this J-job and pass to it the necessary varia- -# bles. 
-# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exregional_post_stat_pm25.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - diff --git a/jobs/JREGIONAL_PRE_POST_STAT b/jobs/JREGIONAL_PRE_POST_STAT deleted file mode 100755 index 640c629bce..0000000000 --- a/jobs/JREGIONAL_PRE_POST_STAT +++ /dev/null @@ -1,108 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# This script runs PRE-POST-STAT. -# -#----------------------------------------------------------------------- -# - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -source_config_for_task "task_pre_post_stat" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the J-job script for the task that runs POST-UPP-STAT. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set the run directory. -# -#----------------------------------------------------------------------- -# -if [ "${RUN_ENVIR}" = "community" ]; then - DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_PRE_POST_STAT}" - check_for_preexist_dir_file "$DATA" "delete" - mkdir_vrfy -p $DATA - cd_vrfy $DATA -fi -# -#----------------------------------------------------------------------- -# -# Call the ex-script for this J-job and pass to it the necessary varia- -# bles. 
-# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exregional_pre_post_stat.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble -# -#----------------------------------------------------------------------- -# -# Remove post_complete flag file. -# -#----------------------------------------------------------------------- -# -post_complete_file="${COMIN}/post_${PDY}${cyc}_task_complete.txt" -if [ -f ${post_complete_file} ] ; then - rm_vrfy -f ${post_complete_file} -fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST index b4327667a0..97b100967c 100755 --- a/jobs/JREGIONAL_RUN_POST +++ b/jobs/JREGIONAL_RUN_POST @@ -139,7 +139,7 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then fcst_len_hrs=$( printf "%03d" "${FCST_LEN_HRS}" ) if [ "${fhr}" = "${fcst_len_hrs}" ]; then - touch "${COMIN}/post_${PDY}${cyc}_task_complete.txt" + touch "${DATAROOT}/DATA_SHARE/${PDY}${cyc}/post_${PDY}${cyc}_task_complete.txt" fi fi fi diff --git a/jobs/JREGIONAL_AQM_ICS b/jobs/JSRW_AQM_ICS similarity index 50% rename from jobs/JREGIONAL_AQM_ICS rename to jobs/JSRW_AQM_ICS index 5c8ba9c8dd..0c4df8aa5b 100755 --- a/jobs/JREGIONAL_AQM_ICS +++ b/jobs/JSRW_AQM_ICS @@ -8,26 +8,30 @@ # #----------------------------------------------------------------------- # - +date +export PS4='+ $SECONDS + ' +set -xue # #----------------------------------------------------------------------- # -# Source the variable definitions file and the bash utility functions. +# Set the NCO standard environment variables (Table 1, pp.4) # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh -source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh +export USHsrw="${HOMEdir}/ush" +export EXECsrw="${HOMEdir}/exec" +export PARMsrw="${HOMEdir}/parm" +export SCRIPTSsrw="${HOMEdir}/scripts" # #----------------------------------------------------------------------- # -# Save current shell options (in a global array). Then set new options -# for this script/function. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +export USHdir="${USHsrw}" # should be removed later +. ${USHsrw}/source_util_funcs.sh +source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # @@ -59,54 +63,117 @@ which the model needs. # #----------------------------------------------------------------------- # -# Set the name of and create the directory in which the output from this -# script will be placed (if it doesn't already exist). 
+# Define job and jobid by default for rocoto
 #
 #-----------------------------------------------------------------------
 #
-if [ $RUN_ENVIR = "nco" ]; then
-  export INPUT_DATA="${COMIN}"
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+  if [ "${SCHED}" = "slurm" ]; then
+    job=${SLURM_JOB_NAME}
+    pid=${SLURM_JOB_ID}
+  elif [ "${SCHED}" = "pbspro" ]; then
+    job=${PBS_JOBNAME}
+    pid=${PBS_JOBID}
+  else
+    job="task"
+    pid=$$
+  fi
+  jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
 else
-  export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
 fi
-mkdir_vrfy -p "${INPUT_DATA}"
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
 #
 #-----------------------------------------------------------------------
 #
-# Set the run directory
+# Set sub-cycle and ensemble member names in file/directory names
 #
 #-----------------------------------------------------------------------
 #
-if [ "${RUN_ENVIR}" = "community" ]; then
-  DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_AQM_ICS}"
-  check_for_preexist_dir_file "$DATA" "delete"
-  mkdir_vrfy -p $DATA
-  cd_vrfy $DATA
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
 fi
 #
 #-----------------------------------------------------------------------
 #
-# Call the ex-script for this J-job and pass to it the necessary variables.
+# Set the name of and create the directory in which the output from this
+# script will be placed (if it doesn't already exist).
 #
 #-----------------------------------------------------------------------
 #
-$SCRIPTSdir/exregional_aqm_ics.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
+if [ $RUN_ENVIR = "nco" ]; then
+  export INPUT_DATA="${COMIN}"
+else
+  export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
+fi
+mkdir -p "${INPUT_DATA}"
 #
 #-----------------------------------------------------------------------
 #
-# Run job postamble.
+# Call the ex-script for this J-job.
# #----------------------------------------------------------------------- # -job_postamble +export pgmout="${DATA}/OUTPUT.$$" +env + +${SCRIPTSsrw}/exsrw_aqm_ics.sh +export err=$?; err_chk + +if [ -e "$pgmout" ]; then + cat $pgmout +fi # #----------------------------------------------------------------------- # -# Restore the shell options saved at the beginning of this script/function. +# Whether or not working directory DATA should be kept. # #----------------------------------------------------------------------- # -{ restore_shell_opts; } > /dev/null 2>&1 +if [ "${KEEPDATA}" = "NO" ]; then + rm -rf ${DATA} +fi +date diff --git a/jobs/JREGIONAL_AQM_LBCS b/jobs/JSRW_AQM_LBCS similarity index 50% rename from jobs/JREGIONAL_AQM_LBCS rename to jobs/JSRW_AQM_LBCS index c711f90288..11a1420d5e 100755 --- a/jobs/JREGIONAL_AQM_LBCS +++ b/jobs/JSRW_AQM_LBCS @@ -8,26 +8,30 @@ # #----------------------------------------------------------------------- # - +date +export PS4='+ $SECONDS + ' +set -xue # #----------------------------------------------------------------------- # -# Source the variable definitions file and the bash utility functions. +# Set the NCO standard environment variables (Table 1, pp.4) # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh -source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh +export USHsrw="${HOMEdir}/ush" +export EXECsrw="${HOMEdir}/exec" +export PARMsrw="${HOMEdir}/parm" +export SCRIPTSsrw="${HOMEdir}/scripts" # #----------------------------------------------------------------------- # -# Save current shell options (in a global array). Then set new options -# for this script/function. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +export USHdir="${USHsrw}" # should be removed later +. ${USHsrw}/source_util_funcs.sh +source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # @@ -59,55 +63,118 @@ which the model needs. # #----------------------------------------------------------------------- # -# Set the name of and create the directory in which the output from this -# script will be placed (if it doesn't already exist). +# Define job and jobid by default for rocoto # #----------------------------------------------------------------------- # -if [ $RUN_ENVIR = "nco" ]; then - export INPUT_DATA="${COMIN}" -else - export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT" +WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}" +if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + if [ "${SCHED}" = "slurm" ]; then + job=${SLURM_JOB_NAME} + pid=${SLURM_JOB_ID} + elif [ "${SCHED}" = "pbspro" ]; then + job=${PBS_JOBNAME} + pid=${PBS_JOBID} + else + job="task" + pid=$$ + fi + jobid="${job}.${PDY}${cyc}.${pid}" fi -mkdir_vrfy -p "${INPUT_DATA}" # #----------------------------------------------------------------------- # -# Set the run directory +# Create a temp working directory (DATA) and cd into it. 
 #
 #-----------------------------------------------------------------------
 #
-if [ "${RUN_ENVIR}" = "community" ]; then
-  DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_AQM_LBCS}"
-  check_for_preexist_dir_file "$DATA" "delete"
-  mkdir_vrfy -p $DATA
-  cd_vrfy $DATA
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
 fi
+export COMINgefs="${COMINgefs:-${COMINgefs_default}}"
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
 #
 #-----------------------------------------------------------------------
 #
-# Call the ex-script for this J-job and pass to it the necessary variables.
+# Set sub-cycle and ensemble member names in file/directory names
 #
 #-----------------------------------------------------------------------
 #
-$SCRIPTSdir/exregional_aqm_lbcs.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Set the name of and create the directory in which the output from this
+# script will be placed (if it doesn't already exist).
+#
+#-----------------------------------------------------------------------
+#
+if [ $RUN_ENVIR = "nco" ]; then
+  export INPUT_DATA="${COMIN}"
+else
+  export INPUT_DATA="${EXPTDIR}/${PDY}${cyc}${SLASH_ENSMEM_SUBDIR}/INPUT"
+fi
+mkdir -p "${INPUT_DATA}"
 #
 #-----------------------------------------------------------------------
 #
-# Run job postamble.
+# Call the ex-script for this J-job.
 #
 #-----------------------------------------------------------------------
 #
-job_postamble
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_aqm_lbcs.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
+fi
 #
 #-----------------------------------------------------------------------
 #
-# Restore the shell options saved at the beginning of this script/function.
+# Whether or not working directory DATA should be kept.
# #----------------------------------------------------------------------- # -{ restore_shell_opts; } > /dev/null 2>&1 +if [ "${KEEPDATA}" = "NO" ]; then + rm -rf ${DATA} +fi +date diff --git a/jobs/JSRW_BIAS_CORRECTION_O3 b/jobs/JSRW_BIAS_CORRECTION_O3 new file mode 100755 index 0000000000..3ab2f2d40f --- /dev/null +++ b/jobs/JSRW_BIAS_CORRECTION_O3 @@ -0,0 +1,161 @@ +#!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# This script runs BIAS-CORRECTION-O3. +# +#----------------------------------------------------------------------- +# +date +export PS4='+ $SECONDS + ' +set -xue +# +#----------------------------------------------------------------------- +# +# Set the NCO standard environment variables (Table 1, pp.4) +# +#----------------------------------------------------------------------- +# +export USHsrw="${HOMEdir}/ush" +export EXECsrw="${HOMEdir}/exec" +export PARMsrw="${HOMEdir}/parm" +export SCRIPTSsrw="${HOMEdir}/scripts" +# +#----------------------------------------------------------------------- +# +# Source the variable definitions file and the bash utility functions. +# +#----------------------------------------------------------------------- +# +export USHdir="${USHsrw}" # should be removed later +. ${USHsrw}/source_util_funcs.sh +source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP} +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " +======================================================================== +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + +This is the J-job script for the task that runs BIAS-CORRECTION-O3. +========================================================================" +# +#----------------------------------------------------------------------- +# +# Define job and jobid by default for rocoto +# +#----------------------------------------------------------------------- +# +WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}" +if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + if [ "${SCHED}" = "slurm" ]; then + job=${SLURM_JOB_NAME} + pid=${SLURM_JOB_ID} + elif [ "${SCHED}" = "pbspro" ]; then + job=${PBS_JOBNAME} + pid=${PBS_JOBID} + else + job="task" + pid=$$ + fi + jobid="${job}.${PDY}${cyc}.${pid}" +fi +# +#----------------------------------------------------------------------- +# +# Create a temp working directory (DATA) and cd into it. +# +#----------------------------------------------------------------------- +# +export DATA="${DATA:-${DATAROOT}/${jobid}}" +mkdir -p $DATA +cd $DATA +# +#----------------------------------------------------------------------- +# +# Define NCO environment variables and set COM type definitions. 
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+
+TMP_STDAY=`${NDATE} -8760 ${PDY}${cyc} | cut -c1-8` # 1 year back
+export BC_STDAY=${BC_STDAY:-${TMP_STDAY}}
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_bias_correction_o3.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+  rm -rf ${DATA}
+fi
+date
+
diff --git a/jobs/JSRW_BIAS_CORRECTION_PM25 b/jobs/JSRW_BIAS_CORRECTION_PM25
new file mode 100755
index 0000000000..42210e7f29
--- /dev/null
+++ b/jobs/JSRW_BIAS_CORRECTION_PM25
@@ -0,0 +1,161 @@
+#!/usr/bin/env bash

+#
+#-----------------------------------------------------------------------
+#
+# This script runs BIAS-CORRECTION-PM25.
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, pp.4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP}
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that runs BIAS-CORRECTION-PM25.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+  if [ "${SCHED}" = "slurm" ]; then
+    job=${SLURM_JOB_NAME}
+    pid=${SLURM_JOB_ID}
+  elif [ "${SCHED}" = "pbspro" ]; then
+    job=${PBS_JOBNAME}
+    pid=${PBS_JOBID}
+  else
+    job="task"
+    pid=$$
+  fi
+  jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+
+TMP_STDAY=`${NDATE} -8760 ${PDY}${cyc} | cut -c1-8` # 1 year back
+export BC_STDAY=${BC_STDAY:-${TMP_STDAY}}
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+# +#----------------------------------------------------------------------- +# +export pgmout="${DATA}/OUTPUT.$$" +env + +${SCRIPTSsrw}/exsrw_bias_correction_pm25.sh +export err=$?; err_chk + +if [ -e "$pgmout" ]; then + cat $pgmout +fi +# +#----------------------------------------------------------------------- +# +# Whether or not working directory DATA should be kept. +# +#----------------------------------------------------------------------- +# +if [ "${KEEPDATA}" = "NO" ]; then + rm -rf ${DATA} +fi +date + diff --git a/jobs/JREGIONAL_FIRE_EMISSION b/jobs/JSRW_FIRE_EMISSION similarity index 57% rename from jobs/JREGIONAL_FIRE_EMISSION rename to jobs/JSRW_FIRE_EMISSION index fdb6e57b0a..ae0343e60e 100755 --- a/jobs/JREGIONAL_FIRE_EMISSION +++ b/jobs/JSRW_FIRE_EMISSION @@ -7,26 +7,30 @@ # #----------------------------------------------------------------------- # - +date +export PS4='+ $SECONDS + ' +set -xue # #----------------------------------------------------------------------- # -# Source the variable definitions file and the bash utility functions. +# Set the NCO standard environment variables (Table 1, pp.4) # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh +export USHsrw="${HOMEdir}/ush" +export EXECsrw="${HOMEdir}/exec" +export PARMsrw="${HOMEdir}/parm" +export SCRIPTSsrw="${HOMEdir}/scripts" # #----------------------------------------------------------------------- # -# Save current shell options (in a global array). Then set new options -# for this script/function. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +export USHdir="${USHsrw}" # should be removed later +. ${USHsrw}/source_util_funcs.sh +source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # @@ -57,78 +61,127 @@ emission data files from disk, or HPSS. # #----------------------------------------------------------------------- # -# Set the external model start time +# Define job and jobid by default for rocoto # #----------------------------------------------------------------------- # -export TIME_OFFSET_HRS=${AQM_FIRE_FILE_OFFSET_HRS:-0} -yyyymmdd=${PDY} -hh=${cyc} -export FIRE_FILE_CDATE=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - ${TIME_OFFSET_HRS} hours" "+%Y%m%d%H" ) - +WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}" +if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + if [ "${SCHED}" = "slurm" ]; then + job=${SLURM_JOB_NAME} + pid=${SLURM_JOB_ID} + elif [ "${SCHED}" = "pbspro" ]; then + job=${PBS_JOBNAME} + pid=${PBS_JOBID} + else + job="task" + pid=$$ + fi + jobid="${job}.${PDY}${cyc}.${pid}" +fi # #----------------------------------------------------------------------- # -# Check whether FIRE EMISSION data files are available on the specified -# cycle date and time on HPSS (FIRE_FILE_CDATE). +# Create a temp working directory (DATA) and cd into it. # #----------------------------------------------------------------------- # -CDATE_min="2022101500" -if [ "$FIRE_FILE_CDATE" -lt "$CDATE_min" ]; then - print_info_msg " -======================================================================== -RAVE fire emission data are not available on HPSS for this date. 
-CDATE: \"${FIRE_FILE_CDATE}\"
-CDATE_min: \"${CDATE_min}\"
-========================================================================"
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
 fi
+
+mkdir -p ${COMOUT}
+
+export COMINfire="${COMINfire:-${COMINfire_default}}"
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
 #
 #-----------------------------------------------------------------------
 #
-# Set the run directory
+# Set sub-cycle and ensemble member names in file/directory names
 #
 #-----------------------------------------------------------------------
 #
-if [ "${RUN_ENVIR}" = "community" ]; then
-  DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_FIRE_EMISSION}"
-  check_for_preexist_dir_file "$DATA" "delete"
-  mkdir_vrfy -p $DATA
-  cd_vrfy $DATA
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
 fi
 #
 #-----------------------------------------------------------------------
 #
-# Create the directory where the RAVE fire emission files should be stored
+# Set the external model start time
 #
 #-----------------------------------------------------------------------
 #
-export FIRE_EMISSION_STAGING_DIR="${FIRE_EMISSION_STAGING_DIR:-${COMIN}/FIRE_EMISSION}"
-mkdir_vrfy -p "${FIRE_EMISSION_STAGING_DIR}"
+export TIME_OFFSET_HRS=${AQM_FIRE_FILE_OFFSET_HRS:-0}
+export FIRE_FILE_CDATE=`$NDATE -${TIME_OFFSET_HRS} ${PDY}${cyc}`
 #
 #-----------------------------------------------------------------------
 #
-# Call the ex-script for this J-job and pass to it the necessary variables.
+# Check whether FIRE EMISSION data files are available on the specified
+# cycle date and time on HPSS (FIRE_FILE_CDATE).
 #
 #-----------------------------------------------------------------------
 #
-$SCRIPTSdir/exregional_fire_emission.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
+CDATE_min="2022101500"
+if [ "$FIRE_FILE_CDATE" -lt "$CDATE_min" ]; then
+  print_info_msg "
+========================================================================
+RAVE fire emission data are not available on HPSS for this date.
+CDATE: \"${FIRE_FILE_CDATE}\"
+CDATE_min: \"${CDATE_min}\"
+========================================================================"
+fi
 #
 #-----------------------------------------------------------------------
 #
-# Run job postamble.
+# Call the ex-script for this J-job.
 #
 #-----------------------------------------------------------------------
 #
-job_postamble
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_fire_emission.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
+fi
 #
 #-----------------------------------------------------------------------
 #
-# Restore the shell options saved at the beginning of this script/function.
+# Whether or not working directory DATA should be kept.
 #
 #-----------------------------------------------------------------------
 #
-{ restore_shell_opts; } > /dev/null 2>&1
+if [ "${KEEPDATA}" = "NO" ]; then
+  rm -rf ${DATA}
+fi
+date
diff --git a/jobs/JREGIONAL_NEXUS_EMISSION b/jobs/JSRW_NEXUS_EMISSION
similarity index 50%
rename from jobs/JREGIONAL_NEXUS_EMISSION
rename to jobs/JSRW_NEXUS_EMISSION
index 915de0f054..33f1aca757 100755
--- a/jobs/JREGIONAL_NEXUS_EMISSION
+++ b/jobs/JSRW_NEXUS_EMISSION
@@ -3,30 +3,34 @@
 #
 #-----------------------------------------------------------------------
 #
-# This script generate NEXUS emission netcdf file.
+# This script generates an individual NEXUS emission netcdf file.
 #
 #-----------------------------------------------------------------------
 #
-
+date
+export PS4='+ $SECONDS + '
+set -xue
 #
 #-----------------------------------------------------------------------
 #
-# Source the variable definitions file and the bash utility functions.
+# Set the NCO standard environment variables (Table 1, pp.4)
 #
 #-----------------------------------------------------------------------
 #
-. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_fcst|cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
 #
 #-----------------------------------------------------------------------
 #
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
+# Source the variable definitions file and the bash utility functions.
 #
 #-----------------------------------------------------------------------
 #
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "task_run_fcst|cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP}
 #
 #-----------------------------------------------------------------------
 #
@@ -57,55 +61,103 @@ using NEXUS which will output for FV3 (in NetCDF format).
 #
 #-----------------------------------------------------------------------
 #
-# Set the name of and create the directory in which the output from this
-# script will be placed (if it doesn't already exist).
+# Define job and jobid by default for rocoto
 #
 #-----------------------------------------------------------------------
 #
-if [ $RUN_ENVIR = "nco" ]; then
-  export INPUT_DATA="${COMIN}/NEXUS"
-else
-  export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/NEXUS"
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+  if [ "${SCHED}" = "slurm" ]; then
+    job=${SLURM_JOB_NAME}
+    pid=${SLURM_JOB_ID}
+  elif [ "${SCHED}" = "pbspro" ]; then
+    job=${PBS_JOBNAME}
+    pid=${PBS_JOBID}
+  else
+    job="task"
+    pid=$$
+  fi
+  jobid="${job}.${PDY}${cyc}.${pid}"
 fi
-mkdir_vrfy -p "${INPUT_DATA}"
 #
 #-----------------------------------------------------------------------
 #
-# Set the run directory
+# Create a temp working directory (DATA) and cd into it.
 #
 #-----------------------------------------------------------------------
 #
-if [ "${RUN_ENVIR}" = "community" ]; then
-  DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_NEXUS_EMISSION_${nspt}}"
-  check_for_preexist_dir_file "$DATA" "delete"
-  mkdir_vrfy -p $DATA
-  cd_vrfy $DATA
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
 fi
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
 #
 #-----------------------------------------------------------------------
 #
-# Call the ex-script for this J-job.
+# Set sub-cycle and ensemble member names in file/directory names
 #
 #-----------------------------------------------------------------------
 #
-$SCRIPTSdir/exregional_nexus_emission.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
 #
 #-----------------------------------------------------------------------
 #
-# Run job postamble.
+# Call the ex-script for this J-job.
 #
 #-----------------------------------------------------------------------
 #
-job_postamble
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_nexus_emission.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
+fi
 #
 #-----------------------------------------------------------------------
 #
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
+# Whether or not working directory DATA should be kept.
 #
 #-----------------------------------------------------------------------
 #
-{ restore_shell_opts; } > /dev/null 2>&1
+if [ "${KEEPDATA}" = "NO" ]; then
+  rm -rf ${DATA}
+fi
+date
diff --git a/jobs/JREGIONAL_NEXUS_GFS_SFC b/jobs/JSRW_NEXUS_GFS_SFC
similarity index 51%
rename from jobs/JREGIONAL_NEXUS_GFS_SFC
rename to jobs/JSRW_NEXUS_GFS_SFC
index 5fc05f86db..89d84c740d 100755
--- a/jobs/JREGIONAL_NEXUS_GFS_SFC
+++ b/jobs/JSRW_NEXUS_GFS_SFC
@@ -7,26 +7,30 @@
 #
 #-----------------------------------------------------------------------
 #
-
+date
+export PS4='+ $SECONDS + '
+set -xue
 #
 #-----------------------------------------------------------------------
 #
-# Source the variable definitions file and the bash utility functions.
+# Set the NCO standard environment variables (Table 1, pp.4)
 #
 #-----------------------------------------------------------------------
 #
-. $USHdir/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh "TRUE"
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
 #
 #-----------------------------------------------------------------------
 #
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
+# Source the variable definitions file and the bash utility functions.
 #
 #-----------------------------------------------------------------------
 #
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP}
 #
 #-----------------------------------------------------------------------
 #
@@ -57,15 +61,87 @@ data files from disk, or HPSS.
 #
 #-----------------------------------------------------------------------
 #
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+  if [ "${SCHED}" = "slurm" ]; then
+    job=${SLURM_JOB_NAME}
+    pid=${SLURM_JOB_ID}
+  elif [ "${SCHED}" = "pbspro" ]; then
+    job=${PBS_JOBNAME}
+    pid=${PBS_JOBID}
+  else
+    job="task"
+    pid=$$
+  fi
+  jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
 # Set the external model start time
 #
 #-----------------------------------------------------------------------
 #
 export TIME_OFFSET_HRS=${NEXUS_GFS_SFC_OFFSET_HRS:-0}
-yyyymmdd=${PDY}
-hh=${cyc}
-export GFS_SFC_CDATE=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - ${TIME_OFFSET_HRS} hours" "+%Y%m%d%H" )
-
+export GFS_SFC_CDATE=`$NDATE -${TIME_OFFSET_HRS} ${PDY}${cyc}`
 #
 #-----------------------------------------------------------------------
 #
@@ -91,40 +167,28 @@ fi
 #
 #-----------------------------------------------------------------------
 #
-# Set the run directory
+# Call the ex-script for this J-job.
 #
 #-----------------------------------------------------------------------
 #
-if [ "${RUN_ENVIR}" = "community" ]; then
-  DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_NEXUS_GFS_SFC}"
-  check_for_preexist_dir_file "$DATA" "delete"
-  mkdir_vrfy -p $DATA
-  cd_vrfy $DATA
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+$SCRIPTSsrw/exsrw_nexus_gfs_sfc.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
 fi
 #
 #-----------------------------------------------------------------------
 #
-# Call the ex-script for this J-job and pass to it the necessary variables.
-#
-#-----------------------------------------------------------------------
-#
-$SCRIPTSdir/exregional_nexus_gfs_sfc.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
-#
-#-----------------------------------------------------------------------
-#
-# Run job postamble.
+# Whether or not working directory DATA should be kept.
 #
 #-----------------------------------------------------------------------
 #
-job_postamble "FALSE"
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
+if [ "${KEEPDATA}" = "NO" ]; then
+  rm -rf ${DATA}
+fi
+date
diff --git a/jobs/JSRW_NEXUS_POST_SPLIT b/jobs/JSRW_NEXUS_POST_SPLIT
new file mode 100755
index 0000000000..6e5a0a259a
--- /dev/null
+++ b/jobs/JSRW_NEXUS_POST_SPLIT
@@ -0,0 +1,163 @@
+#!/usr/bin/env bash

+#
+#-----------------------------------------------------------------------
+#
+# This script generates the final NEXUS emission netcdf file.
+# +#----------------------------------------------------------------------- +# +date +export PS4='+ $SECONDS + ' +set -xue +# +#----------------------------------------------------------------------- +# +# Set the NCO standard environment variables (Table 1, pp.4) +# +#----------------------------------------------------------------------- +# +export USHsrw="${HOMEdir}/ush" +export EXECsrw="${HOMEdir}/exec" +export PARMsrw="${HOMEdir}/parm" +export SCRIPTSsrw="${HOMEdir}/scripts" +# +#----------------------------------------------------------------------- +# +# Source the variable definitions file and the bash utility functions. +# +#----------------------------------------------------------------------- +# +export USHdir="${USHsrw}" # should be removed later +. ${USHsrw}/source_util_funcs.sh +source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP} +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " +======================================================================== +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + +This is the J-job script for the task that generates the emission files +using NEXUS which will output for FV3 (in NetCDF format). +========================================================================" +# +#----------------------------------------------------------------------- +# +# Define job and jobid by default for rocoto +# +#----------------------------------------------------------------------- +# +WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}" +if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + if [ "${SCHED}" = "slurm" ]; then + job=${SLURM_JOB_NAME} + pid=${SLURM_JOB_ID} + elif [ "${SCHED}" = "pbspro" ]; then + job=${PBS_JOBNAME} + pid=${PBS_JOBID} + else + job="task" + pid=$$ + fi + jobid="${job}.${PDY}${cyc}.${pid}" +fi +# +#----------------------------------------------------------------------- +# +# Create a temp working directory (DATA) and cd into it. +# +#----------------------------------------------------------------------- +# +export DATA="${DATA:-${DATAROOT}/${jobid}}" +mkdir -p $DATA +cd $DATA +# +#----------------------------------------------------------------------- +# +# Define NCO environment variables and set COM type definitions. 
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_nexus_post_split.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+  rm -rf ${DATA}
+fi
+date
+
diff --git a/jobs/JSRW_POINT_SOURCE b/jobs/JSRW_POINT_SOURCE
new file mode 100755
index 0000000000..a112a2d275
--- /dev/null
+++ b/jobs/JSRW_POINT_SOURCE
@@ -0,0 +1,159 @@
+#!/usr/bin/env bash

+#
+#-----------------------------------------------------------------------
+#
+# This script generates the POINT SOURCE EMISSION file.
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, pp.4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP}
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that generates the point source files.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+  if [ "${SCHED}" = "slurm" ]; then
+    job=${SLURM_JOB_NAME}
+    pid=${SLURM_JOB_ID}
+  elif [ "${SCHED}" = "pbspro" ]; then
+    job=${PBS_JOBNAME}
+    pid=${PBS_JOBID}
+  else
+    job="task"
+    pid=$$
+  fi
+  jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+#
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+$SCRIPTSsrw/exsrw_point_source.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+# +#----------------------------------------------------------------------- +# +if [ "${KEEPDATA}" = "NO" ]; then + rm -rf ${DATA} +fi +date + diff --git a/jobs/JSRW_POST_STAT_O3 b/jobs/JSRW_POST_STAT_O3 new file mode 100755 index 0000000000..8924cba9e5 --- /dev/null +++ b/jobs/JSRW_POST_STAT_O3 @@ -0,0 +1,160 @@ +#!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# This script runs POST-STAT-O3. +# +#----------------------------------------------------------------------- +# +date +export PS4='+ $SECONDS + ' +set -xue +# +#----------------------------------------------------------------------- +# +# Set the NCO standard environment variables (Table 1, pp.4) +# +#----------------------------------------------------------------------- +# +export USHsrw="${HOMEdir}/ush" +export EXECsrw="${HOMEdir}/exec" +export PARMsrw="${HOMEdir}/parm" +export SCRIPTSsrw="${HOMEdir}/scripts" +# +#----------------------------------------------------------------------- +# +# Source the variable definitions file and the bash utility functions. +# +#----------------------------------------------------------------------- +# +export USHdir="${USHsrw}" # should be removed later +. ${USHsrw}/source_util_funcs.sh +source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP} +. $USHdir/job_preamble.sh +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " +======================================================================== +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + +This is the J-job script for the task that runs POST-STAT-O3. +========================================================================" +# +#----------------------------------------------------------------------- +# +# Define job and jobid by default for rocoto +# +#----------------------------------------------------------------------- +# +WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}" +if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + if [ "${SCHED}" = "slurm" ]; then + job=${SLURM_JOB_NAME} + pid=${SLURM_JOB_ID} + elif [ "${SCHED}" = "pbspro" ]; then + job=${PBS_JOBNAME} + pid=${PBS_JOBID} + else + job="task" + pid=$$ + fi + jobid="${job}.${PDY}${cyc}.${pid}" +fi +# +#----------------------------------------------------------------------- +# +# Create a temp working directory (DATA) and cd into it. +# +#----------------------------------------------------------------------- +# +export DATA="${DATA:-${DATAROOT}/${jobid}}" +mkdir -p $DATA +cd $DATA +# +#----------------------------------------------------------------------- +# +# Define NCO environment variables and set COM type definitions. 
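+# Under the NCO directory convention, COMIN/COMOUT below resolve to
+#   ${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}
+# e.g. .../com/aqm/v7.0/aqm.20240226/12 (illustrative values).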
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_post_stat_o3.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+  rm -rf ${DATA}
+fi
+date
+
+
diff --git a/jobs/JSRW_POST_STAT_PM25 b/jobs/JSRW_POST_STAT_PM25
new file mode 100755
index 0000000000..83434fa8c7
--- /dev/null
+++ b/jobs/JSRW_POST_STAT_PM25
@@ -0,0 +1,158 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script runs POST-STAT-PM25.
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, pp.4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP}
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
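+# (${READLINK} -f resolves any symlinks, so the entry banner below
+# reports the physical location of this script.)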
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that runs POST-STAT-PM25.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+  if [ "${SCHED}" = "slurm" ]; then
+    job=${SLURM_JOB_NAME}
+    pid=${SLURM_JOB_ID}
+  elif [ "${SCHED}" = "pbspro" ]; then
+    job=${PBS_JOBNAME}
+    pid=${PBS_JOBID}
+  else
+    job="task"
+    pid=$$
+  fi
+  jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_post_stat_pm25.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
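+# (err_chk above aborts the job on a nonzero ex-script status, so this
+# cleanup is reached only after a successful run.)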
+# +#----------------------------------------------------------------------- +# +if [ "${KEEPDATA}" = "NO" ]; then + rm -rf ${DATA} +fi +date + diff --git a/jobs/JSRW_PRE_POST_STAT b/jobs/JSRW_PRE_POST_STAT new file mode 100755 index 0000000000..12561085c2 --- /dev/null +++ b/jobs/JSRW_PRE_POST_STAT @@ -0,0 +1,173 @@ +#!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# This script runs PRE-POST-STAT. +# +#----------------------------------------------------------------------- +# +date +export PS4='+ $SECONDS + ' +set -xue +# +#----------------------------------------------------------------------- +# +# Set the NCO standard environment variables (Table 1, pp.4) +# +#----------------------------------------------------------------------- +# +export USHsrw="${HOMEdir}/ush" +export EXECsrw="${HOMEdir}/exec" +export PARMsrw="${HOMEdir}/parm" +export SCRIPTSsrw="${HOMEdir}/scripts" +# +#----------------------------------------------------------------------- +# +# Source the variable definitions file and the bash utility functions. +# +#----------------------------------------------------------------------- +# +export USHdir="${USHsrw}" # should be removed later +. ${USHsrw}/source_util_funcs.sh +source_config_for_task "task_pre_post_stat" ${GLOBAL_VAR_DEFNS_FP} +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " +======================================================================== +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + +This is the J-job script for the task that runs PRE-POST-STAT. +========================================================================" +# +#----------------------------------------------------------------------- +# +# Define job and jobid by default for rocoto +# +#----------------------------------------------------------------------- +# +WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}" +if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + if [ "${SCHED}" = "slurm" ]; then + job=${SLURM_JOB_NAME} + pid=${SLURM_JOB_ID} + elif [ "${SCHED}" = "pbspro" ]; then + job=${PBS_JOBNAME} + pid=${PBS_JOBID} + else + job="task" + pid=$$ + fi + jobid="${job}.${PDY}${cyc}.${pid}" +fi +# +#----------------------------------------------------------------------- +# +# Create a temp working directory (DATA) and cd into it. +# +#----------------------------------------------------------------------- +# +export DATA="${DATA:-${DATAROOT}/${jobid}}" +mkdir -p $DATA +cd $DATA +# +#----------------------------------------------------------------------- +# +# Define NCO environment variables and set COM type definitions. 
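+# On WCOSS2 the canonical com path comes from compath.py; on other
+# platforms the same path is assembled directly from ${COMROOT} (see
+# the if-branch below).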
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_pre_post_stat.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Remove post_complete flag file.
+#
+#-----------------------------------------------------------------------
+#
+post_complete_file="${DATA_SHARE}/post_${PDY}${cyc}_task_complete.txt"
+if [ -f ${post_complete_file} ] ; then
+  rm -f ${post_complete_file}
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
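+# (${DATA_SHARE} is intentionally left in place: it is shared with the
+# other tasks of this cycle and is not removed here.)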
+# +#----------------------------------------------------------------------- +# +if [ "${KEEPDATA}" = "NO" ]; then + rm -rf ${DATA} +fi +date + diff --git a/modulefiles/build_hera_intel.lua b/modulefiles/build_hera_intel.lua index 2121d303dc..061feef67b 100644 --- a/modulefiles/build_hera_intel.lua +++ b/modulefiles/build_hera_intel.lua @@ -27,6 +27,7 @@ load("srw_common") load(pathJoin("nccmp", os.getenv("nccmp_ver") or "1.9.0.1")) load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) +load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2")) setenv("CMAKE_C_COMPILER","mpiicc") setenv("CMAKE_CXX_COMPILER","mpiicpc") diff --git a/modulefiles/python_srw_cmaq.lua b/modulefiles/python_srw_aqm.lua similarity index 100% rename from modulefiles/python_srw_cmaq.lua rename to modulefiles/python_srw_aqm.lua diff --git a/modulefiles/tasks/cheyenne/aqm_ics.local.lua b/modulefiles/tasks/cheyenne/aqm_ics.local.lua index 1040aab9a6..9c9f0ca3d5 100644 --- a/modulefiles/tasks/cheyenne/aqm_ics.local.lua +++ b/modulefiles/tasks/cheyenne/aqm_ics.local.lua @@ -1,3 +1,3 @@ load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0")) load("nco/4.9.5") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua b/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua index 1040aab9a6..9c9f0ca3d5 100644 --- a/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua +++ b/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua @@ -1,3 +1,3 @@ load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0")) load("nco/4.9.5") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/fire_emission.local.lua b/modulefiles/tasks/cheyenne/fire_emission.local.lua index b62670156f..86252a9a4f 100644 --- a/modulefiles/tasks/cheyenne/fire_emission.local.lua +++ b/modulefiles/tasks/cheyenne/fire_emission.local.lua @@ -1,2 +1,2 @@ load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/nexus_emission.local.lua b/modulefiles/tasks/cheyenne/nexus_emission.local.lua index c46ead59a9..3c690fa12a 100644 --- a/modulefiles/tasks/cheyenne/nexus_emission.local.lua +++ b/modulefiles/tasks/cheyenne/nexus_emission.local.lua @@ -2,4 +2,4 @@ load("nco/4.9.5") load("mpt/2.25") load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua b/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua index b62670156f..86252a9a4f 100644 --- a/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua +++ b/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua @@ -1,2 +1,2 @@ load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/nexus_post_split.local.lua b/modulefiles/tasks/cheyenne/nexus_post_split.local.lua index c957eff552..e3f4bbe95d 100644 --- a/modulefiles/tasks/cheyenne/nexus_post_split.local.lua +++ b/modulefiles/tasks/cheyenne/nexus_post_split.local.lua @@ -1,3 +1,3 @@ load(pathJoin("nco", os.getenv("nco_ver") or "4.9.5")) load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/point_source.local.lua b/modulefiles/tasks/cheyenne/point_source.local.lua index b62670156f..86252a9a4f 100644 --- a/modulefiles/tasks/cheyenne/point_source.local.lua +++ b/modulefiles/tasks/cheyenne/point_source.local.lua @@ -1,2 +1,2 @@ load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/pre_post_stat.local.lua b/modulefiles/tasks/cheyenne/pre_post_stat.local.lua 
index 7dcdc5969b..042eb2f732 100644 --- a/modulefiles/tasks/cheyenne/pre_post_stat.local.lua +++ b/modulefiles/tasks/cheyenne/pre_post_stat.local.lua @@ -1,2 +1,2 @@ load("nco/4.9.5") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/aqm_ics.local.lua b/modulefiles/tasks/derecho/aqm_ics.local.lua index 26b28db2c5..30f1157fbb 100644 --- a/modulefiles/tasks/derecho/aqm_ics.local.lua +++ b/modulefiles/tasks/derecho/aqm_ics.local.lua @@ -1,2 +1,2 @@ load("nco/5.0.6") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/aqm_lbcs.local.lua b/modulefiles/tasks/derecho/aqm_lbcs.local.lua index 26b28db2c5..30f1157fbb 100644 --- a/modulefiles/tasks/derecho/aqm_lbcs.local.lua +++ b/modulefiles/tasks/derecho/aqm_lbcs.local.lua @@ -1,2 +1,2 @@ load("nco/5.0.6") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/fire_emission.local.lua b/modulefiles/tasks/derecho/fire_emission.local.lua index b62670156f..86252a9a4f 100644 --- a/modulefiles/tasks/derecho/fire_emission.local.lua +++ b/modulefiles/tasks/derecho/fire_emission.local.lua @@ -1,2 +1,2 @@ load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/nexus_emission.local.lua b/modulefiles/tasks/derecho/nexus_emission.local.lua index 09f38a17dd..e7f216375c 100644 --- a/modulefiles/tasks/derecho/nexus_emission.local.lua +++ b/modulefiles/tasks/derecho/nexus_emission.local.lua @@ -1,4 +1,4 @@ load("nco/5.0.6") load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua b/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua index b62670156f..86252a9a4f 100644 --- a/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua +++ b/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua @@ -1,2 +1,2 @@ load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/nexus_post_split.local.lua b/modulefiles/tasks/derecho/nexus_post_split.local.lua index a03758c9c6..07d126ff0b 100644 --- a/modulefiles/tasks/derecho/nexus_post_split.local.lua +++ b/modulefiles/tasks/derecho/nexus_post_split.local.lua @@ -1,3 +1,3 @@ load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6")) load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/point_source.local.lua b/modulefiles/tasks/derecho/point_source.local.lua index b62670156f..86252a9a4f 100644 --- a/modulefiles/tasks/derecho/point_source.local.lua +++ b/modulefiles/tasks/derecho/point_source.local.lua @@ -1,2 +1,2 @@ load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/pre_post_stat.local.lua b/modulefiles/tasks/derecho/pre_post_stat.local.lua index 26b28db2c5..30f1157fbb 100644 --- a/modulefiles/tasks/derecho/pre_post_stat.local.lua +++ b/modulefiles/tasks/derecho/pre_post_stat.local.lua @@ -1,2 +1,2 @@ load("nco/5.0.6") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/hera/aqm_ics.local.lua b/modulefiles/tasks/hera/aqm_ics.local.lua index 0e7132d749..2eb2ea2ee0 100644 --- a/modulefiles/tasks/hera/aqm_ics.local.lua +++ b/modulefiles/tasks/hera/aqm_ics.local.lua @@ -1,2 +1,2 @@ -load("python_srw_cmaq") -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) +load("python_srw_aqm") +load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6")) diff --git a/modulefiles/tasks/hera/aqm_lbcs.local.lua b/modulefiles/tasks/hera/aqm_lbcs.local.lua index 
0e7132d749..2eb2ea2ee0 100644 --- a/modulefiles/tasks/hera/aqm_lbcs.local.lua +++ b/modulefiles/tasks/hera/aqm_lbcs.local.lua @@ -1,2 +1,2 @@ -load("python_srw_cmaq") -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) +load("python_srw_aqm") +load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6")) diff --git a/modulefiles/tasks/hera/fire_emission.local.lua b/modulefiles/tasks/hera/fire_emission.local.lua index 8aa737aa65..68d6f14832 100644 --- a/modulefiles/tasks/hera/fire_emission.local.lua +++ b/modulefiles/tasks/hera/fire_emission.local.lua @@ -1,3 +1,3 @@ load("hpss") -load("python_srw_cmaq") -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) +load("python_srw_aqm") +load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6")) diff --git a/modulefiles/tasks/hera/nexus_emission.local.lua b/modulefiles/tasks/hera/nexus_emission.local.lua index c7ac9dcb90..d1f95e6d31 100644 --- a/modulefiles/tasks/hera/nexus_emission.local.lua +++ b/modulefiles/tasks/hera/nexus_emission.local.lua @@ -1,2 +1,2 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6")) +load("python_srw_aqm") diff --git a/modulefiles/tasks/hera/nexus_post_split.local.lua b/modulefiles/tasks/hera/nexus_post_split.local.lua index 0e7132d749..2eb2ea2ee0 100644 --- a/modulefiles/tasks/hera/nexus_post_split.local.lua +++ b/modulefiles/tasks/hera/nexus_post_split.local.lua @@ -1,2 +1,2 @@ -load("python_srw_cmaq") -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) +load("python_srw_aqm") +load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6")) diff --git a/modulefiles/tasks/hera/point_source.local.lua b/modulefiles/tasks/hera/point_source.local.lua index 89feda226c..df0e35d5da 100644 --- a/modulefiles/tasks/hera/point_source.local.lua +++ b/modulefiles/tasks/hera/point_source.local.lua @@ -1 +1 @@ -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/hera/pre_post_stat.local.lua b/modulefiles/tasks/hera/pre_post_stat.local.lua index 23370a8d60..ede4c61606 100644 --- a/modulefiles/tasks/hera/pre_post_stat.local.lua +++ b/modulefiles/tasks/hera/pre_post_stat.local.lua @@ -1 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) +load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6")) diff --git a/modulefiles/tasks/hercules/aqm_ics.local.lua b/modulefiles/tasks/hercules/aqm_ics.local.lua index c7ac9dcb90..2aac950d8d 100644 --- a/modulefiles/tasks/hercules/aqm_ics.local.lua +++ b/modulefiles/tasks/hercules/aqm_ics.local.lua @@ -1,2 +1,2 @@ load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/fire_emission.local.lua b/modulefiles/tasks/hercules/fire_emission.local.lua index c7ac9dcb90..2aac950d8d 100644 --- a/modulefiles/tasks/hercules/fire_emission.local.lua +++ b/modulefiles/tasks/hercules/fire_emission.local.lua @@ -1,2 +1,2 @@ load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/nexus_emission.local.lua b/modulefiles/tasks/hercules/nexus_emission.local.lua index c7ac9dcb90..2aac950d8d 100644 --- a/modulefiles/tasks/hercules/nexus_emission.local.lua +++ b/modulefiles/tasks/hercules/nexus_emission.local.lua @@ -1,2 +1,2 @@ load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/nexus_post_split.local.lua 
b/modulefiles/tasks/hercules/nexus_post_split.local.lua index c7ac9dcb90..2aac950d8d 100644 --- a/modulefiles/tasks/hercules/nexus_post_split.local.lua +++ b/modulefiles/tasks/hercules/nexus_post_split.local.lua @@ -1,2 +1,2 @@ load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/point_source.local.lua b/modulefiles/tasks/hercules/point_source.local.lua index 89feda226c..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/point_source.local.lua +++ b/modulefiles/tasks/hercules/point_source.local.lua @@ -1 +1 @@ -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/aqm_ics.local.lua b/modulefiles/tasks/orion/aqm_ics.local.lua index c7ac9dcb90..2aac950d8d 100644 --- a/modulefiles/tasks/orion/aqm_ics.local.lua +++ b/modulefiles/tasks/orion/aqm_ics.local.lua @@ -1,2 +1,2 @@ load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/fire_emission.local.lua b/modulefiles/tasks/orion/fire_emission.local.lua index c7ac9dcb90..2aac950d8d 100644 --- a/modulefiles/tasks/orion/fire_emission.local.lua +++ b/modulefiles/tasks/orion/fire_emission.local.lua @@ -1,2 +1,2 @@ load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/nexus_emission.local.lua b/modulefiles/tasks/orion/nexus_emission.local.lua index c7ac9dcb90..2aac950d8d 100644 --- a/modulefiles/tasks/orion/nexus_emission.local.lua +++ b/modulefiles/tasks/orion/nexus_emission.local.lua @@ -1,2 +1,2 @@ load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/nexus_post_split.local.lua b/modulefiles/tasks/orion/nexus_post_split.local.lua index c7ac9dcb90..2aac950d8d 100644 --- a/modulefiles/tasks/orion/nexus_post_split.local.lua +++ b/modulefiles/tasks/orion/nexus_post_split.local.lua @@ -1,2 +1,2 @@ load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/point_source.local.lua b/modulefiles/tasks/orion/point_source.local.lua index 89feda226c..df0e35d5da 100644 --- a/modulefiles/tasks/orion/point_source.local.lua +++ b/modulefiles/tasks/orion/point_source.local.lua @@ -1 +1 @@ -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/parm/aqm.rc b/parm/aqm.rc index 3d2ad32711..4ffaf5095e 100644 --- a/parm/aqm.rc +++ b/parm/aqm.rc @@ -7,14 +7,14 @@ # # General settings # -ae_matrix_nml: {{ aqm_config_dir }}/AE_cb6r3_ae6_aq.nml -gc_matrix_nml: {{ aqm_config_dir }}/GC_cb6r3_ae6_aq.nml -nr_matrix_nml: {{ aqm_config_dir }}/NR_cb6r3_ae6_aq.nml -tr_matrix_nml: {{ aqm_config_dir }}/Species_Table_TR_0.nml +ae_matrix_nml: {{ fixaqm }}/epa/AE_cb6r3_ae6_aq.nml +gc_matrix_nml: {{ fixaqm }}/epa/GC_cb6r3_ae6_aq.nml +nr_matrix_nml: {{ fixaqm }}/epa/NR_cb6r3_ae6_aq.nml +tr_matrix_nml: {{ fixaqm }}/epa/Species_Table_TR_0.nml -csqy_data: {{ aqm_config_dir }}/CSQY_DATA_cb6r3_ae6_aq -optics_data: {{ aqm_config_dir }}/PHOT_OPTICS.dat -omi_data: {{ aqm_config_dir }}/omi_cmaq_2015_361X179.dat +csqy_data: {{ fixaqm }}/epa/CSQY_DATA_cb6r3_ae6_aq +optics_data: {{ fixaqm }}/epa/PHOT_OPTICS.dat +omi_data: {{ fixaqm }}/epa/omi_cmaq_2015_361X179.dat init_concentrations: {{ init_concentrations | lower }} @@ -172,7 +172,7 @@ bio_format: netcdf bio_file: {{ aqm_rc_bio_file_fp }} bio_frequency: static bio_period: summer 
-bio_speciation_file: {{ dcominbio }}/gspro_biogenics_1mar2017.txt +bio_speciation_file: {{ fixaqm }}/bio/gspro_biogenics_1mar2017.txt bio_speciation_profile: B10C6 bio_species:: AVG_NOAG_GROW 1.00000 AVG_NOAG_GROW gmN/hr diff --git a/parm/wflow/aqm_post.yaml b/parm/wflow/aqm_post.yaml index 31b7b34848..5f307184d3 100644 --- a/parm/wflow/aqm_post.yaml +++ b/parm/wflow/aqm_post.yaml @@ -5,7 +5,7 @@ default_aqm_task: &default_aqm maxtries: '2' envars: &default_vars GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' - USHdir: '&USHdir;' + HOMEdir: '&HOMEdir;' PDY: !cycstr "@Y@m@d" cyc: !cycstr "@H" nprocs: '{{ parent.nnodes * parent.ppn // 1 }}' @@ -22,21 +22,21 @@ default_aqm_task: &default_aqm task_pre_post_stat: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "pre_post_stat" "&JOBSdir;/JREGIONAL_PRE_POST_STAT"' + command: '&LOAD_MODULES_RUN_TASK_FP; "pre_post_stat" "&HOMEdir;/jobs/JSRW_PRE_POST_STAT"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: or: datadep: attrs: age: 00:00:00:05 - text: !cycstr '&COMIN_DIR;/post_@Y@m@d@H_task_complete.txt' + text: !cycstr '&DATAROOT;/DATA_SHARE/@Y@m@d@H/post_@Y@m@d@H_task_complete.txt' metataskdep: attrs: metatask: run_ens_post task_post_stat_o3: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_o3" "&JOBSdir;/JREGIONAL_POST_STAT_O3"' + command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_o3" "&HOMEdir;/jobs/JSRW_POST_STAT_O3"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: @@ -46,7 +46,7 @@ task_post_stat_o3: task_post_stat_pm25: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_pm25" "&JOBSdir;/JREGIONAL_POST_STAT_PM25"' + command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_pm25" "&HOMEdir;/jobs/JSRW_POST_STAT_PM25"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: @@ -56,7 +56,7 @@ task_post_stat_pm25: task_bias_correction_o3: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_o3" "&JOBSdir;/JREGIONAL_BIAS_CORRECTION_O3"' + command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_o3" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_O3"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: @@ -66,7 +66,7 @@ task_bias_correction_o3: task_bias_correction_pm25: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_pm25" "&JOBSdir;/JREGIONAL_BIAS_CORRECTION_PM25"' + command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_pm25" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_PM25"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: diff --git a/parm/wflow/aqm_prep.yaml b/parm/wflow/aqm_prep.yaml index 6cfab161d7..d8f01d2c82 100644 --- a/parm/wflow/aqm_prep.yaml +++ b/parm/wflow/aqm_prep.yaml @@ -5,12 +5,19 @@ default_aqm_task: &default_aqm maxtries: '2' envars: &default_vars GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' - USHdir: '&USHdir;' + HOMEdir: '&HOMEdir;' + envir: '&envir;' + model_ver: '&model_ver;' + KEEPDATA: '&KEEPDATA;' + SENDCOM: '&SENDCOM;' + COMROOT: '&COMROOT;' + DATAROOT: '&DATAROOT;' + DCOMROOT: '&DCOMROOT;' + LOGDIR: !cycstr "&LOGDIR;" PDY: !cycstr "@Y@m@d" cyc: !cycstr "@H" nprocs: '{{ parent.nnodes * parent.ppn // 1 }}' subcyc: !cycstr "@M" - LOGDIR: !cycstr "&LOGDIR;" SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' native: '{{ platform.SCHED_NATIVE_CMD }}' nnodes: 1 @@ -22,7 +29,7 @@ default_aqm_task: &default_aqm task_nexus_gfs_sfc: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_gfs_sfc" "&JOBSdir;/JREGIONAL_NEXUS_GFS_SFC"' + command: 
'&LOAD_MODULES_RUN_TASK_FP; "nexus_gfs_sfc" "&HOMEdir;/jobs/JSRW_NEXUS_GFS_SFC"' native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}' partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' @@ -46,9 +53,9 @@ metatask_nexus_emission: nspt: '{% for h in range(0, cpl_aqm_parm.NUM_SPLIT_NEXUS) %}{{ " %02d" % h }}{% endfor %}' task_nexus_emission_#nspt#: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_emission" "&JOBSdir;/JREGIONAL_NEXUS_EMISSION"' + command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_emission" "&HOMEdir;/jobs/JSRW_NEXUS_EMISSION"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' - nnodes: 4 + nnodes: '{{ task_nexus_emission.NNODES_NEXUS_EMISSION }}' ppn: '{{ task_nexus_emission.PPN_NEXUS_EMISSION // 1 }}' walltime: 01:00:00 envars: @@ -61,7 +68,7 @@ metatask_nexus_emission: task_nexus_post_split: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_post_split" "&JOBSdir;/JREGIONAL_NEXUS_POST_SPLIT"' + command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_post_split" "&HOMEdir;/jobs/JSRW_NEXUS_POST_SPLIT"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: metataskdep: @@ -70,13 +77,13 @@ task_nexus_post_split: task_fire_emission: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "fire_emission" "&JOBSdir;/JREGIONAL_FIRE_EMISSION"' + command: '&LOAD_MODULES_RUN_TASK_FP; "fire_emission" "&HOMEdir;/jobs/JSRW_FIRE_EMISSION"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 2G task_point_source: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "point_source" "&JOBSdir;/JREGIONAL_POINT_SOURCE"' + command: '&LOAD_MODULES_RUN_TASK_FP; "point_source" "&HOMEdir;/jobs/JSRW_POINT_SOURCE"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' walltime: 01:00:00 dependency: @@ -94,7 +101,7 @@ task_aqm_ics_ext: attrs: cycledefs: at_start maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&JOBSdir;/JREGIONAL_AQM_ICS"' + command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"' envars: <<: *default_vars PREV_CYCLE_DIR: '&WARMSTART_CYCLE_DIR;' @@ -119,7 +126,7 @@ task_aqm_ics: attrs: cycledefs: cycled_from_second maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&JOBSdir;/JREGIONAL_AQM_ICS"' + command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"' envars: <<: *default_vars PREV_CYCLE_DIR: '&COMIN_DIR;' @@ -137,11 +144,11 @@ task_aqm_ics: datadep_tracer: attrs: age: 00:00:00:05 - text: &COMIN_DIR;/RESTART/fv_tracer.res.tile1.nc + text: '&COMIN_DIR;/RESTART/fv_tracer.res.tile1.nc' task_aqm_lbcs: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_lbcs" "&JOBSdir;/JREGIONAL_AQM_LBCS"' + command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_lbcs" "&HOMEdir;/jobs/JSRW_AQM_LBCS"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' ppn: 24 dependency: diff --git a/parm/wflow/default_workflow.yaml b/parm/wflow/default_workflow.yaml index b70ad8dbb2..c79415b3be 100644 --- a/parm/wflow/default_workflow.yaml +++ b/parm/wflow/default_workflow.yaml @@ -5,18 +5,18 @@ rocoto: entities: ACCOUNT: '{{ user.ACCOUNT }}' CCPA_OBS_DIR: '{{ platform.CCPA_OBS_DIR }}' - COMIN_DIR: '{% if user.RUN_ENVIR == "nco" %}{{"{}/{}.@Y@m@d/@H".format(nco.COMIN_BASEDIR,nco.RUN_default)}}{% else %}{{"{}/@Y@m@d@H".format(workflow.EXPTDIR)}}{% endif %}' + COLDSTART: '{{ workflow.COLDSTART }}' COMINgfs: '{{ 
platform.get("COMINgfs") }}' - FCST_DIR: '{% if user.RUN_ENVIR == "nco" %}{{"{}/run_fcst_mem#mem#.{}_@Y@m@d@H".format(nco.DATAROOT_default,workflow.WORKFLOW_ID)}}{% else %}{{"{}/@Y@m@d@H".format(workflow.EXPTDIR)}}{% endif %}' GLOBAL_VAR_DEFNS_FP: '{{ workflow.GLOBAL_VAR_DEFNS_FP }}' + HOMEdir: '{{ user.HOMEdir }}' JOBSdir: '{{ user.JOBSdir }}' + KEEPDATA: '{{ nco.KEEPDATA_default }}' LOAD_MODULES_RUN_TASK_FP: '{{ workflow.LOAD_MODULES_RUN_TASK_FP }}' - LOGDIR: '{% if user.RUN_ENVIR == "nco" %}{{"{}/@Y@m@d".format(nco.LOGBASEDIR_default)}}{% else %}{{nco.LOGBASEDIR_default }}{% endif %}' - LOGEXT: '{% if user.RUN_ENVIR == "nco" %}{{".{}.log".format(workflow.WORKFLOW_ID)}}{% else %}{{".log"}}{% endif %}' + LOGEXT: ".log" + NET: '{{ nco.NET_default }}' MRMS_OBS_DIR: '{{ platform.MRMS_OBS_DIR }}' NCORES_PER_NODE: '{{ platform.NCORES_PER_NODE }}' NDAS_OBS_DIR: '{{ platform.NDAS_OBS_DIR }}' - NET: '{{ nco.NET_default }}' NOHRSC_OBS_DIR: '{{ platform.NOHRSC_OBS_DIR }}' PARTITION_DEFAULT: '{{ platform.get("PARTITION_DEFAULT") }}' PARTITION_FCST: '{{ platform.get("PARTITION_FCST") }}' @@ -26,11 +26,20 @@ rocoto: QUEUE_HPSS: '{{ platform.get("QUEUE_HPSS") }}' RUN: '{{ nco.RUN_default }}' SCRIPTSdir: '{{ user.SCRIPTSdir }}' + SENDCOM: '{{ nco.SENDCOM_default }}' SLASH_ENSMEM_SUBDIR: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% else %}{{ "/" }}{% endif %}' USHdir: '{{ user.USHdir }}' - COLDSTART: '{{ workflow.COLDSTART }}' WARMSTART_CYCLE_DIR: '{{ workflow.WARMSTART_CYCLE_DIR }}' WORKFLOW_ID: '{{ workflow.WORKFLOW_ID }}' + + envir: '{{ nco.envir_default }}' + model_ver: '{{ nco.model_ver_default }}' + COMROOT: '{{ nco.PTMP }}/&envir;/com' + DATAROOT: '{{ nco.PTMP }}/&envir;/tmp' + DCOMROOT: '{{ nco.PTMP }}/&envir;/dcom' + COMIN_DIR: '{% if user.RUN_ENVIR == "nco" %}&COMROOT;/&NET;/&model_ver;/&RUN;.@Y@m@d/@H{% else %}{{ workflow.EXPTDIR }}/@Y@m@d@H{% endif %}' + FCST_DIR: '{% if user.RUN_ENVIR == "nco" %}&DATAROOT;/run_fcst_mem#mem#_@Y@m@d@H{% else %}{{ workflow.EXPTDIR }}/@Y@m@d@H{% endif %}' + LOGDIR: '{% if user.RUN_ENVIR == "nco" %}&COMROOT;/output/logs/@Y@m@d{% else %}{{ workflow.EXPTDIR }}/log{% endif %}' attrs: cyclethrottle: "200" realtime: "F" diff --git a/scripts/exregional_fire_emission.sh b/scripts/exregional_fire_emission.sh deleted file mode 100755 index ef1b4e291d..0000000000 --- a/scripts/exregional_fire_emission.sh +++ /dev/null @@ -1,198 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP} -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). 
-# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the ex-script for the task that fetches fire emission -data files from disk or generates model-ready RAVE emission file from raw -data files. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set up variables for call to retrieve_data.py -# -#----------------------------------------------------------------------- -# -yyyymmdd=${FIRE_FILE_CDATE:0:8} -hh=${FIRE_FILE_CDATE:8:2} - -CDATE_mh1=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - 1 hours" "+%Y%m%d%H" ) - -yyyymmdd_mh1=${CDATE_mh1:0:8} -hh_mh1=${CDATE_mh1:8:2} -# -#----------------------------------------------------------------------- -# -# Retrieve fire file to FIRE_EMISSION_STAGING_DIR -# -#----------------------------------------------------------------------- -# -aqm_fire_file_fn="${AQM_FIRE_FILE_PREFIX}_${yyyymmdd}_t${hh}z${AQM_FIRE_FILE_SUFFIX}" - -# Check if the fire file exists in the designated directory -if [ -e "${DCOMINfire}/${aqm_fire_file_fn}" ]; then - cp_vrfy "${DCOMINfire}/${aqm_fire_file_fn}" "${FIRE_EMISSION_STAGING_DIR}" -else - # Copy raw data - for ihr in {0..23}; do - download_time=$( $DATE_UTIL --utc --date "${yyyymmdd_mh1} ${hh_mh1} UTC - $ihr hours" "+%Y%m%d%H" ) - FILE_13km="Hourly_Emissions_13km_${download_time}00_${download_time}00.nc" - yyyymmdd_dn=${download_time:0:8} - hh_dn=${download_time:8:2} - missing_download_time=$( $DATE_UTIL --utc --date "${yyyymmdd_dn} ${hh_dn} UTC - 24 hours" "+%Y%m%d%H" ) - yyyymmdd_dn_md1=${missing_download_time:0:8} - FILE_13km_md1=Hourly_Emissions_13km_${missing_download_time}00_${missing_download_time}00.nc - if [ -e "${DCOMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}" ]; then - cp_vrfy "${DCOMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}" . - elif [ -e "${DCOMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1}" ]; then - echo "WARNING: ${FILE_13km} does not exist. Replacing with the file of previous date ..." - cp_vrfy "${DCOMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1}" "${FILE_13km}" - else - message_txt="Fire Emission RAW data does not exist: - FILE_13km_md1 = \"${FILE_13km_md1}\" - DCOMINfire = \"${DCOMINfire}\"" - - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - cp_vrfy "${DCOMINfire}/Hourly_Emissions_13km_dummy.nc" "${FILE_13km}" - message_warning="WARNING: ${message_txt}. Replacing with the dummy file :: AQM RUN SOFT FAILED." - print_info_msg "${message_warning}" - if [ ! -z "${maillist}" ]; then - echo "${message_warning}" | mail.py $maillist - fi - else - print_err_msg_exit "${message_txt}" - fi - fi - done - - ncks -O -h --mk_rec_dmn time Hourly_Emissions_13km_${download_time}00_${download_time}00.nc temp.nc - export err=$? - if [ $err -ne 0 ]; then - message_txt="Call to NCKS returned with nonzero exit code." 
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - fi - - mv_vrfy temp.nc Hourly_Emissions_13km_${download_time}00_${download_time}00.nc - - ncrcat -h Hourly_Emissions_13km_*.nc Hourly_Emissions_13km_${yyyymmdd}0000_${yyyymmdd}2300.t${cyc}z.nc - export err=$? - if [ $err -ne 0 ]; then - message_txt="Call to NCRCAT returned with nonzero exit code." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - fi - - input_fire="${DATA}/Hourly_Emissions_13km_${yyyymmdd}0000_${yyyymmdd}2300.t${cyc}z.nc" - output_fire="${DATA}/Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_new24.t${cyc}z.nc" - - python3 ${HOMEdir}/sorc/AQM-utils/python_utils/RAVE_remake.allspecies.aqmna13km.g793.py --date "${yyyymmdd}" --cyc "${hh}" --input_fire "${input_fire}" --output_fire "${output_fire}" - export err=$? - if [ $err -ne 0 ]; then - message_txt="Call to python script \"RAVE_remake.allspecies.py\" returned with nonzero exit code." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - fi - - ncks --mk_rec_dmn Time Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_new24.t${cyc}z.nc -o Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc - export err=$? - if [ $err -ne 0 ]; then - message_txt="Call to NCKS returned with nonzero exit code." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - fi - - ncrcat Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc ${aqm_fire_file_fn} - export err=$? - if [ $err -ne 0 ]; then - message_txt="Call to NCRCAT returned with nonzero exit code." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - fi - - # Copy the final fire emission file to STAGING_DIR - cp_vrfy "${DATA}/${aqm_fire_file_fn}" "${FIRE_EMISSION_STAGING_DIR}" - - # Archive the final fire emission file to disk and HPSS - if [ "${DO_AQM_SAVE_FIRE}" = "TRUE" ]; then - cp "${DATA}/${aqm_fire_file_fn}" ${DCOMINfire} - - hsi_log_fn="log.hsi_put.${yyyymmdd}_${hh}" - hsi put ${aqm_fire_file_fn} : ${AQM_FIRE_ARCHV_DIR}/${aqm_fire_file_fn} >& ${hsi_log_fn} - export err=$? - if [ $err -ne 0 ]; then - message_txt="htar file writing operation (\"hsi put ...\") failed. Check the log -file hsi_log_fn in the DATA directory for details: - DATA = \"${DATA}\" - hsi_log_fn = \"${hsi_log_fn}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - fi - fi -fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. 
-# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index 60852095ee..0fd6b0884d 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -643,15 +643,23 @@ POST_STEP # #----------------------------------------------------------------------- # -mv_vrfy out.atm.tile${TILE_RGNL}.nc \ - ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc - -mv_vrfy out.sfc.tile${TILE_RGNL}.nc \ - ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc - -mv_vrfy gfs_ctrl.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc - -mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc +if [ "${CPL_AQM}" = "TRUE" ]; then + COMOUT="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later + if [ "${COLDSTART}" = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then + data_trans_path="${COMOUT}" + else + data_trans_path="${DATA_SHARE}" + fi + cpreq -p out.atm.tile${TILE_RGNL}.nc "${data_trans_path}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" + cpreq -p out.sfc.tile${TILE_RGNL}.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" + cpreq -p gfs_ctrl.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" + cpreq -p gfs.bndy.nc "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc" +else + mv_vrfy out.atm.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc + mv_vrfy out.sfc.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc + mv_vrfy gfs_ctrl.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc + mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc +fi # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index fcde8e6f46..3a7f586051 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -559,7 +559,11 @@ located in the following directory: lbc_spec_fhrs=( "${EXTRN_MDL_FHRS[$i]}" ) fcst_hhh=$(( ${lbc_spec_fhrs} - ${EXTRN_MDL_LBCS_OFFSET_HRS} )) fcst_hhh_FV3LAM=$( printf "%03d" "$fcst_hhh" ) - mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc + if [ "${CPL_AQM}" = "TRUE" ]; then + cpreq -p gfs.bndy.nc ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc + else + mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc + fi fi done diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 723086b077..0013fad47d 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -233,42 +233,59 @@ cd_vrfy ${DATA}/INPUT # relative_link_flag="FALSE" -target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" -symlink="gfs_data.nc" -create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" +if [ "${CPL_AQM}" = "TRUE" ]; then + COMIN="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later 
-target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" -symlink="sfc_data.nc" -create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" + target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" + symlink="gfs_data.nc" + create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}" -target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" -symlink="gfs_ctrl.nc" -create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" + target="${COMIN}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" + symlink="sfc_data.nc" + create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}" + + target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" + symlink="gfs_ctrl.nc" + create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}" + + for fhr in $(seq -f "%03g" 0 ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS}); do + target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f${fhr}.nc" + symlink="gfs_bndy.tile${TILE_RGNL}.${fhr}.nc" + create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}" + done + target="${COMIN}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc" + symlink="NEXUS_Expt.nc" + create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}" + + # create symlink to PT for point source in SRW-AQM + target="${COMIN}/${NET}.${cycle}${dot_ensmem}.PT.nc" + if [ -f ${target} ]; then + symlink="PT.nc" + create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}" + fi +else + target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" + symlink="gfs_data.nc" + create_symlink_to_file target="$target" symlink="$symlink" \ + relative="${relative_link_flag}" -for fhr in $(seq -f "%03g" 0 ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS}); do - target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f${fhr}.nc" - symlink="gfs_bndy.tile${TILE_RGNL}.${fhr}.nc" + target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" + symlink="sfc_data.nc" create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" -done + relative="${relative_link_flag}" -if [ "${CPL_AQM}" = "TRUE" ]; then - target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc" - symlink="NEXUS_Expt.nc" + target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" + symlink="gfs_ctrl.nc" create_symlink_to_file target="$target" symlink="$symlink" \ relative="${relative_link_flag}" - # create symlink to PT for point source in Online-CMAQ - target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.PT.nc" - if [ -f ${target} ]; then - symlink="PT.nc" + for fhr in $(seq -f "%03g" 0 ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS}); do + target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f${fhr}.nc" + symlink="gfs_bndy.tile${TILE_RGNL}.${fhr}.nc" create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" - fi + relative="${relative_link_flag}" + done fi # #----------------------------------------------------------------------- diff --git a/scripts/exregional_aqm_ics.sh b/scripts/exsrw_aqm_ics.sh similarity index 68% rename from scripts/exregional_aqm_ics.sh rename to scripts/exsrw_aqm_ics.sh index 676cc4ed90..9104374705 100755 --- 
a/scripts/exregional_aqm_ics.sh +++ b/scripts/exsrw_aqm_ics.sh @@ -7,7 +7,7 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. ${USHsrw}/source_util_funcs.sh source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +17,7 @@ source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -53,31 +53,31 @@ tial or boundary condition files for the FV3 will be generated. # #----------------------------------------------------------------------- # -rst_dir=${PREV_CYCLE_DIR}/RESTART -rst_file=fv_tracer.res.tile1.nc -fv_tracer_file=${rst_dir}/${PDY}.${cyc}0000.${rst_file} -print_info_msg " - Looking for tracer restart file: \"${fv_tracer_file}\"" +rst_dir="${PREV_CYCLE_DIR}/RESTART" +rst_file="fv_tracer.res.tile1.nc" +fv_tracer_file="${rst_dir}/${PDY}.${cyc}0000.${rst_file}" +print_info_msg "Looking for tracer restart file: \"${fv_tracer_file}\"" if [ ! -r ${fv_tracer_file} ]; then if [ -r ${rst_dir}/coupler.res ]; then rst_info=( $( tail -n 1 ${rst_dir}/coupler.res ) ) - rst_date=$( printf "%04d%02d%02d%02d" ${rst_info[@]:0:4} ) + # Remove leading zeros from ${rst_info[1]} + month="${rst_info[1]#"${rst_info[1]%%[!0]*}"}" + # Remove leading zeros from ${rst_info[2]} + day="${rst_info[2]#"${rst_info[2]%%[!0]*}"}" + # Format the date without leading zeros + rst_date=$(printf "%04d%02d%02d%02d" ${rst_info[0]} $((10#$month)) $((10#$day)) ${rst_info[3]}) print_info_msg " Tracer file not found. Checking available restart date: requested date: \"${PDY}${cyc}\" available date: \"${rst_date}\"" if [ "${rst_date}" = "${PDY}${cyc}" ] ; then - fv_tracer_file=${rst_dir}/${rst_file} + fv_tracer_file="${rst_dir}/${rst_file}" if [ -r ${fv_tracer_file} ]; then - print_info_msg " - Tracer file found: \"${fv_tracer_file}\"" + print_info_msg "Tracer file found: \"${fv_tracer_file}\"" else - message_txt="No suitable tracer restart file found." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2"]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + message_txt="FATAL ERROR No suitable tracer restart file ${rst_dir}/${rst_file} found." + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi fi fi @@ -88,46 +88,44 @@ fi # Add air quality tracer variables from previous cycle's restart output # to atmosphere's initial condition file according to the steps below: # -# a. Python script to manipulate the files (see comments inside for -# details) +# a. Python script to manipulate the files (see comments inside for details) # b. Remove checksum attribute to prevent overflow -# # c. 
Rename reulting file as the expected atmospheric IC file # #----------------------------------------------------------------------- # -gfs_ic_file=${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc -wrk_ic_file=${DATA}/gfs.nc +gfs_ic_fn="${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" +gfs_ic_fp="${DATA_SHARE}/${gfs_ic_fn}" +wrk_ic_fp="${DATA}/gfs.nc" print_info_msg " Adding air quality tracers to atmospheric initial condition file: tracer file: \"${fv_tracer_file}\" - FV3 IC file: \"${gfs_ic_file}\"" + FV3 IC file: \"${gfs_ic_fp}\"" -cp_vrfy ${gfs_ic_file} ${wrk_ic_file} -python3 ${HOMEdir}/sorc/AQM-utils/python_utils/add_aqm_ics.py --fv_tracer_file "${fv_tracer_file}" --wrk_ic_file "${wrk_ic_file}" +cpreq ${gfs_ic_fp} ${wrk_ic_fp} +${USHsrw}/aqm_utils_python/add_aqm_ics.py --fv_tracer_file "${fv_tracer_file}" --wrk_ic_file "${wrk_ic_fp}" export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"add_aqm_ics.py\" failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi ncatted -a checksum,,d,s, tmp1.nc export err=$? if [ $err -ne 0 ]; then message_txt="Call to NCATTED returned with nonzero exit code." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi -cp_vrfy tmp1.nc ${gfs_ic_file} +mv tmp1.nc ${gfs_ic_fn} + +cpreq -p ${gfs_ic_fn} ${COMOUT} +cpreq -p "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" ${COMOUT} +cpreq -p "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" ${COMOUT} +cpreq -p "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc" ${COMOUT} unset fv_tracer_file unset wrk_ic_file @@ -138,20 +136,17 @@ unset wrk_ic_file # #----------------------------------------------------------------------- # - print_info_msg " +print_info_msg " ======================================================================== -Successfully added air quality tracers to atmospheric initial condition -file!!! +Successfully added air quality tracers to atmospheric IC file!!! Exiting script: \"${scrfunc_fn}\" In directory: \"${scrfunc_dir}\" ========================================================================" - # #----------------------------------------------------------------------- # -# Restore the shell options saved at the beginning of this script/func- -# tion. +# Restore the shell options saved at the beginning of this script/function. # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_aqm_lbcs.sh b/scripts/exsrw_aqm_lbcs.sh similarity index 67% rename from scripts/exregional_aqm_lbcs.sh rename to scripts/exsrw_aqm_lbcs.sh index 09a33d40a2..f6d932962e 100755 --- a/scripts/exregional_aqm_lbcs.sh +++ b/scripts/exsrw_aqm_lbcs.sh @@ -7,7 +7,7 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. 
${USHsrw}/source_util_funcs.sh source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +17,7 @@ source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aq # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -77,10 +77,10 @@ fi # #----------------------------------------------------------------------- # -CDATE_MOD=$( $DATE_UTIL --utc --date "${PDY} ${cyc} UTC - ${EXTRN_MDL_LBCS_OFFSET_HRS} hours" "+%Y%m%d%H" ) -yyyymmdd=${CDATE_MOD:0:8} -mm="${CDATE_MOD:4:2}" -hh="${CDATE_MOD:8:2}" +CDATE_MOD=`$NDATE -${EXTRN_MDL_LBCS_OFFSET_HRS} ${PDY}${cyc}` +YYYYMMDD="${CDATE_MOD:0:8}" +MM="${CDATE_MOD:4:2}" +HH="${CDATE_MOD:8:2}" if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then cyc_mod=$(( ${cyc} - ${DATE_FIRST_CYCL:8:2} )) @@ -92,38 +92,40 @@ for i_lbc in $(seq ${LBC_SPEC_INTVL_HRS} ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS} ) LBC_SPEC_FCST_HRS+=("$i_lbc") done -if [ ${DO_AQM_CHEM_LBCS} = "TRUE" ]; then - - ext_lbcs_file=${AQM_LBCS_FILES} - chem_lbcs_fn=${ext_lbcs_file//<MM>/${mm}} +# Copy lbcs files from DATA_SHARE +aqm_lbcs_fn_prefix="${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f" +for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do + fhr=$( printf "%03d" "${hr}" ) + aqm_lbcs_fn="${aqm_lbcs_fn_prefix}${fhr}.nc" + cpreq "${DATA_SHARE}/${aqm_lbcs_fn}" ${DATA} +done - chem_lbcs_fp=${DCOMINchem_lbcs}/${chem_lbcs_fn} +if [ "${DO_AQM_CHEM_LBCS}" = "TRUE" ]; then + ext_lbcs_file="${AQM_LBCS_FILES}" + chem_lbcs_fn=${ext_lbcs_file//<MM>/${MM}} + chem_lbcs_fp="${FIXaqm}/chemlbc/${chem_lbcs_fn}" if [ -f ${chem_lbcs_fp} ]; then #Copy the boundary condition file to the current location - cp_vrfy ${chem_lbcs_fp} . + cpreq ${chem_lbcs_fp} . else message_txt="The chemical LBC files do not exist: CHEM_BOUNDARY_CONDITION_FILE = \"${chem_lbcs_fp}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do fhr=$( printf "%03d" "${hr}" ) - if [ -r ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fhr}.nc ]; then - ncks -A ${chem_lbcs_fn} ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fhr}.nc + aqm_lbcs_fn="${aqm_lbcs_fn_prefix}${fhr}.nc" + if [ -r "${aqm_lbcs_fn}" ]; then + ncks -A ${chem_lbcs_fn} ${aqm_lbcs_fn} export err=$? if [ $err -ne 0 ]; then message_txt="Call to NCKS returned with nonzero exit code." 
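# A minimal, self-contained sketch of the parameter expansions used above,
# with a hypothetical template: the "//" form substitutes every occurrence
# of the <MM> month placeholder, and the "#"/"%%" pair strips leading zeros
# so a zero-padded month is not read as octal in arithmetic:
ext_lbcs_file="am4_bndy.c793.2019<MM>.v1.nc"   # hypothetical AQM_LBCS_FILES value
MM="07"
chem_lbcs_fn=${ext_lbcs_file//<MM>/${MM}}      # -> am4_bndy.c793.201907.v1.nc
month="${MM#"${MM%%[!0]*}"}"                   # -> "7": leading zeros removed
printf "%02d\n" $(( 10#${MM} ))                # 10# forces base 10; prints 07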
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi + cpreq ${aqm_lbcs_fn} "${aqm_lbcs_fn}_chemlbc" fi done @@ -139,54 +141,49 @@ fi # #----------------------------------------------------------------------- # -if [ ${DO_AQM_GEFS_LBCS} = "TRUE" ]; then - - AQM_GEFS_FILE_CYC=${AQM_GEFS_FILE_CYC:-"${hh}"} +if [ "${DO_AQM_GEFS_LBCS}" = "TRUE" ]; then + AQM_GEFS_FILE_CYC=${AQM_GEFS_FILE_CYC:-"${HH}"} AQM_GEFS_FILE_CYC=$( printf "%02d" "${AQM_GEFS_FILE_CYC}" ) - GEFS_CYC_DIFF=$(( cyc - AQM_GEFS_FILE_CYC )) - if [ "${GEFS_CYC_DIFF}" -lt "0" ]; then - TSTEPDIFF=$( printf "%02d" $(( 24 + ${GEFS_CYC_DIFF} )) ) + gefs_cyc_diff=$(( cyc - AQM_GEFS_FILE_CYC )) + if [ "${YYYYMMDD}" = "${PDY}" ]; then + tstepdiff=$( printf "%02d" ${gefs_cyc_diff} ) else - TSTEPDIFF=$( printf "%02d" ${GEFS_CYC_DIFF} ) + tstepdiff=$( printf "%02d" $(( 24 + ${gefs_cyc_diff} )) ) fi - AQM_MOFILE_FN="${AQM_GEFS_FILE_PREFIX}.t${AQM_GEFS_FILE_CYC}z.atmf" + aqm_mofile_fn="${AQM_GEFS_FILE_PREFIX}.t${AQM_GEFS_FILE_CYC}z.atmf" if [ "${DO_REAL_TIME}" = "TRUE" ]; then - AQM_MOFILE_FP="${COMINgefs}/gefs.${yyyymmdd}/${AQM_GEFS_FILE_CYC}/chem/sfcsig/${AQM_MOFILE_FN}" + aqm_mofile_fp="${COMINgefs}/gefs.${YYYYMMDD}/${AQM_GEFS_FILE_CYC}/chem/sfcsig/${aqm_mofile_fn}" else - AQM_MOFILE_FP="${DCOMINgefs}/${yyyymmdd}/${AQM_GEFS_FILE_CYC}/${AQM_MOFILE_FN}" + aqm_mofile_fp="${COMINgefs}/${YYYYMMDD}/${AQM_GEFS_FILE_CYC}/${aqm_mofile_fn}" fi # Check if GEFS aerosol files exist for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do hr_mod=$(( hr + EXTRN_MDL_LBCS_OFFSET_HRS )) fhr=$( printf "%03d" "${hr_mod}" ) - AQM_MOFILE_FHR_FP="${AQM_MOFILE_FP}${fhr}.nemsio" - if [ ! -e "${AQM_MOFILE_FHR_FP}" ]; then - message_txt="The GEFS file (AQM_MOFILE_FHR_FP) for LBCs of \"${cycle}\" does not exist: - AQM_MOFILE_FHR_FP = \"${AQM_MOFILE_FHR_FP}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - message_warning="WARNING: ${message_txt}" - print_info_msg "${message_warning}" - if [ ! -z "${maillist}" ]; then - echo "${message_warning}" | mail.py $maillist - fi + aqm_mofile_fhr_fp="${aqm_mofile_fp}${fhr}.nemsio" + if [ ! -e "${aqm_mofile_fhr_fp}" ]; then + message_txt="WARNING: The GEFS file (AQM_MOFILE_FHR_FP) for LBCs of \"${cycle}\" does not exist: + aqm_mofile_fhr_fp = \"${aqm_mofile_fhr_fp}\"" + if [ ! -z "${MAILTO}" ] && [ "${MACHINE}" = "WCOSS2" ]; then + echo "${message_txt}" | mail.py $maillist else print_err_msg_exit "${message_txt}" - fi + fi fi done - NUMTS="$(( FCST_LEN_HRS / LBC_SPEC_INTVL_HRS + 1 ))" + numts="$(( FCST_LEN_HRS / LBC_SPEC_INTVL_HRS + 1 ))" cat > gefs2lbc-nemsio.ini <>$pgmout 2>errfile + export err=$?; err_chk print_info_msg " ======================================================================== Successfully added GEFS aerosol LBCs !!! 
========================================================================" -# fi + +for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do + fhr=$( printf "%03d" "${hr}" ) + aqm_lbcs_fn="${aqm_lbcs_fn_prefix}${fhr}.nc" + cpreq -p "${DATA}/${aqm_lbcs_fn}" ${COMOUT} +done # print_info_msg " ======================================================================== diff --git a/scripts/exregional_bias_correction_o3.sh b/scripts/exsrw_bias_correction_o3.sh similarity index 68% rename from scripts/exregional_bias_correction_o3.sh rename to scripts/exsrw_bias_correction_o3.sh index 709cc1957d..1ef4012528 100755 --- a/scripts/exregional_bias_correction_o3.sh +++ b/scripts/exsrw_bias_correction_o3.sh @@ -7,7 +7,7 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. ${USHsrw}/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +17,7 @@ source_config_for_task "cpl_aqm_parm|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -80,8 +80,8 @@ yyyymm_m1=${PDYm1:0:6} yyyy_m2=${PDYm2:0:4} yyyymm_m2=${PDYm2:0:6} yyyy_m3=${PDYm3:0:4} -yyyymm_m3=${PDYm3:0:6} - +yyyymm_m3=${PDYm3:0:6} + # #----------------------------------------------------------------------- # @@ -103,13 +103,11 @@ fi # STEP 1: Retrieve AIRNOW observation data #----------------------------------------------------------------------------- -mkdir_vrfy -p "${DATA}/data" +mkdir -p "${DATA}/data" -# Retrieve real-time airnow data for the last three days and convert them into netcdf. -# In the following for-loop, pdym stands for previous (m) day of the present day (PDY) -# in the NCO standards, i.e. PDYm1: 1day ago, PDYm2: 2days ago, PDYm3: 3days ago -for i_pdym in {1..3}; do - case $i_pdym in +# Retrieve real-time airnow data for the last three days and convert them into netcdf + for ipdym in {1..3}; do + case $ipdym in 1) cvt_yyyy="${yyyy_m1}" cvt_yyyymm="${yyyymm_m1}" @@ -134,22 +132,22 @@ for i_pdym in {1..3}; do cvt_input_fp="${cvt_input_dir}/YYYY/YYYYMMDD/${cvt_input_fn}" cvt_output_fp="${cvt_output_dir}/YYYY/YYYYMMDD/${cvt_output_fn}" - mkdir_vrfy -p "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}" - mkdir_vrfy -p "${cvt_output_dir}/${cvt_yyyy}/${cvt_pdy}" - cp_vrfy ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}" + mkdir -p "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}" + mkdir -p "${cvt_output_dir}/${cvt_yyyy}/${cvt_pdy}" + + if [ "$(ls -A ${DCOMINairnow}/${cvt_pdy}/airnow)" ]; then + cp ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}" + else + message_warning="WARNING: airnow data missing; skipping date ${cvt_pdy}" + print_info_msg "${message_warning}" + fi PREP_STEP eval ${RUN_CMD_SERIAL} ${EXECdir}/convert_airnow_csv ${cvt_input_fp} ${cvt_output_fp} ${cvt_pdy} ${cvt_pdy} ${REDIRECT_OUT_ERR} export err=$? - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_chk - else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to executable to run CONVERT_AIRNOW_CSV returned with nonzero exit code." 
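# The "{ save_shell_opts; set -xue; }" preamble adopted across these
# ex-scripts replaces the old sourced preamble.sh; a minimal sketch of what
# the combined flags do (save_shell_opts is the app's own helper, paired
# with restore_shell_opts at script exit):
set -x   # trace each command to the log before it runs
set -u   # make expansion of an unset variable a fatal error
set -e   # abort the script when any command exits nonzero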
- fi - fi POST_STEP -done + done #----------------------------------------------------------------------------- # STEP 2: Extracting PM2.5, O3, and met variables from CMAQ input and outputs @@ -158,7 +156,7 @@ done FCST_LEN_HRS=$( printf "%03d" ${FCST_LEN_HRS} ) ic=1 while [ $ic -lt 120 ]; do - if [ -s ${COMIN}/${NET}.${cycle}.chem_sfc.f${FCST_LEN_HRS}.nc ]; then + if [ -s ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.f${FCST_LEN_HRS}.nc ]; then echo "cycle ${cyc} post1 is done!" break else @@ -173,113 +171,88 @@ fi # remove any pre-existing ${NET}.${cycle}.chem_sfc/met_sfc.nc for 2-stage post processing DATA_grid="${DATA}/data/bcdata.${yyyymm}/grid" if [ -d "${DATA_grid}/${cyc}z/${PDY}" ]; then - rm_vrfy -rf "${DATA_grid}/${cyc}z/${PDY}" + rm -rf "${DATA_grid}/${cyc}z/${PDY}" fi -mkdir_vrfy -p "${DATA_grid}/${cyc}z/${PDY}" -ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY} -ln_vrfy -sf ${COMIN}/${NET}.${cycle}.met_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY} +mkdir -p "${DATA_grid}/${cyc}z/${PDY}" +ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY} +ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.met_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY} #----------------------------------------------------------------------------- # STEP 3: Interpolating CMAQ O3 into AIRNow sites #----------------------------------------------------------------------------- -mkdir_vrfy -p ${DATA}/data/coords -mkdir_vrfy -p ${DATA}/data/site-lists.interp -mkdir_vrfy -p ${DATA}/out/ozone/${yyyy} -mkdir_vrfy -p ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy} +mkdir -p ${DATA}/data/coords +mkdir -p ${DATA}/data/site-lists.interp +mkdir -p ${DATA}/out/ozone/${yyyy} +mkdir -p ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy} -cp_vrfy ${PARMaqm_utils}/bias_correction/sites.valid.ozone.20230331.12z.list ${DATA}/data/site-lists.interp -cp_vrfy ${PARMaqm_utils}/bias_correction/aqm.t12z.chem_sfc.f000.nc ${DATA}/data/coords -cp_vrfy ${PARMaqm_utils}/bias_correction/config.interp.ozone.7-vars_${id_domain}.${cyc}z ${DATA} +cp ${PARMdir}/aqm_utils/bias_correction/sites.valid.ozone.20230331.12z.list ${DATA}/data/site-lists.interp +cp ${PARMdir}/aqm_utils/bias_correction/aqm.t12z.chem_sfc.f000.nc ${DATA}/data/coords +cp ${PARMdir}/aqm_utils/bias_correction/config.interp.ozone.7-vars_${id_domain}.${cyc}z ${DATA} PREP_STEP eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_interpolate config.interp.ozone.7-vars_${id_domain}.${cyc}z ${cyc}z ${PDY} ${PDY} ${REDIRECT_OUT_ERR} export err=$? -if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_chk -else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to executable to run AQM_BIAS_INTERPOLATE returned with nonzero exit code." 
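# A minimal sketch of the non-empty-directory guard used in STEP 1 of this
# script, with hypothetical paths: "ls -A" prints nothing for a missing or
# empty directory, so the test is false and the copy is skipped with a
# warning instead of killing the job under "set -e":
src_dir="/path/to/dcom/20240101/airnow"        # hypothetical
dst_dir="/path/to/data/airnow/2024/20240101"   # hypothetical
mkdir -p "${dst_dir}"
if [ "$(ls -A ${src_dir} 2>/dev/null)" ]; then
  cp ${src_dir}/HourlyAQObs_20240101*.dat "${dst_dir}"
else
  print_info_msg "WARNING: airnow data missing; skipping date 20240101"
fi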
- fi -fi POST_STEP -cp_vrfy ${DATA}/out/ozone/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy} +cp ${DATA}/out/ozone/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy} if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then - mkdir_vrfy -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} - cp_vrfy ${DATA}/out/ozone/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} - - for i_pdym in {0..3}; do - case $i_pdym in - 0) - cvt_yyyy="${yyyy}" - cvt_yyyymm="${yyyymm}" - cvt_pdy="${PDY}" - ;; - 1) - cvt_yyyy="${yyyy_m1}" - cvt_yyyymm="${yyyymm_m1}" - cvt_pdy="${PDYm1}" - ;; - 2) - cvt_yyyy="${yyyy_m2}" - cvt_yyyymm="${yyyymm_m2}" - cvt_pdy="${PDYm2}" - ;; - 3) - cvt_yyyy="${yyyy_m3}" - cvt_yyyymm="${yyyymm_m3}" - cvt_pdy="${PDYm3}" - ;; - esac - # CSV and NetCDF files - mkdir_vrfy -p ${COMOUTbicor}/bcdata.${cvt_yyyymm}/airnow/csv/${cvt_yyyy}/${cvt_pdy} - mkdir_vrfy -p ${COMOUTbicor}/bcdata.${cvt_yyyymm}/airnow/netcdf/${cvt_yyyy}/${cvt_pdy} - if [ "${i_pdym}" != "0" ]; then - cp_vrfy ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat ${COMOUTbicor}/bcdata.${cvt_yyyymm}/airnow/csv/${cvt_yyyy}/${cvt_pdy} - cp_vrfy ${DATA}/data/bcdata.${cvt_yyyymm}/airnow/netcdf/${cvt_yyyy}/${cvt_pdy}/HourlyAQObs.${cvt_pdy}.nc ${COMOUTbicor}/bcdata.${cvt_yyyymm}/airnow/netcdf/${cvt_yyyy}/${cvt_pdy} - fi - done - mkdir_vrfy -p ${COMOUTbicor}/bcdata.${yyyymm}/grid/${cyc}z/${PDY} - cp_vrfy ${COMIN}/${NET}.${cycle}.*sfc*.nc ${COMOUTbicor}/bcdata.${yyyymm}/grid/${cyc}z/${PDY} + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} + cp ${DATA}/out/ozone/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} + + # CSV files + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/airnow/csv/${yyyy}/${PDY} + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m1}/airnow/csv/${yyyy_m1}/${PDYm1} + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m2}/airnow/csv/${yyyy_m2}/${PDYm2} + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m3}/airnow/csv/${yyyy_m3}/${PDYm3} + cp ${DCOMINairnow}/${PDYm1}/airnow/HourlyAQObs_${PDYm1}*.dat ${COMOUTbicor}/bcdata.${yyyymm_m1}/airnow/csv/${yyyy_m1}/${PDYm1} + cp ${DCOMINairnow}/${PDYm2}/airnow/HourlyAQObs_${PDYm2}*.dat ${COMOUTbicor}/bcdata.${yyyymm_m2}/airnow/csv/${yyyy_m2}/${PDYm2} + cp ${DCOMINairnow}/${PDYm3}/airnow/HourlyAQObs_${PDYm3}*.dat ${COMOUTbicor}/bcdata.${yyyymm_m3}/airnow/csv/${yyyy_m3}/${PDYm3} + + # NetCDF files + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/airnow/netcdf/${yyyy}/${PDY} + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m1}/airnow/netcdf/${yyyy_m1}/${PDYm1} + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m2}/airnow/netcdf/${yyyy_m2}/${PDYm2} + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m3}/airnow/netcdf/${yyyy_m3}/${PDYm3} + cp ${DATA}/data/bcdata.${yyyymm_m1}/airnow/netcdf/${yyyy_m1}/${PDYm1}/HourlyAQObs.${PDYm1}.nc ${COMOUTbicor}/bcdata.${yyyymm_m1}/airnow/netcdf/${yyyy_m1}/${PDYm1} + cp ${DATA}/data/bcdata.${yyyymm_m2}/airnow/netcdf/${yyyy_m2}/${PDYm2}/HourlyAQObs.${PDYm2}.nc ${COMOUTbicor}/bcdata.${yyyymm_m2}/airnow/netcdf/${yyyy_m2}/${PDYm2} + cp ${DATA}/data/bcdata.${yyyymm_m3}/airnow/netcdf/${yyyy_m3}/${PDYm3}/HourlyAQObs.${PDYm3}.nc ${COMOUTbicor}/bcdata.${yyyymm_m3}/airnow/netcdf/${yyyy_m3}/${PDYm3} + + mkdir -p "${COMOUTbicor}/bcdata.${yyyymm}/grid/${cyc}z/${PDY}" + cp ${COMIN}/${cyc}/${NET}.${cycle}.*_sfc.f*.nc ${COMOUTbicor}/bcdata.${yyyymm}/grid/${cyc}z/${PDY} fi #----------------------------------------------------------------------------- # STEP 4: Performing Bias Correction for Ozone 
#----------------------------------------------------------------------------- -rm_vrfy -rf ${DATA}/data/bcdata* +rm -rf ${DATA}/data/bcdata* -ln_vrfy -sf ${COMINbicor}/bcdata* "${DATA}/data" +ln -sf ${COMINbicor}/bcdata* "${DATA}/data" -mkdir_vrfy -p ${DATA}/data/sites -cp_vrfy ${PARMaqm_utils}/bias_correction/config.ozone.bias_corr_${id_domain}.${cyc}z ${DATA} +mkdir -p ${DATA}/data/sites +cp ${PARMdir}/aqm_utils/bias_correction/config.ozone.bias_corr_${id_domain}.${cyc}z ${DATA} PREP_STEP eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_correct config.ozone.bias_corr_${id_domain}.${cyc}z ${cyc}z ${BC_STDAY} ${PDY} ${REDIRECT_OUT_ERR} export err=$? -if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_chk -else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to executable to run AQM_BIAS_CORRECT returned with nonzero exit code." - fi -fi POST_STEP -cp_vrfy ${DATA}/out/ozone.corrected* ${COMIN} +cp ${DATA}/out/ozone.corrected* ${COMOUT} if [ "${cyc}" = "12" ]; then - cp_vrfy ${DATA}/data/sites/sites.valid.ozone.${PDY}.${cyc}z.list ${DATA} + cp ${DATA}/data/sites/sites.valid.ozone.${PDY}.${cyc}z.list ${DATA} fi #----------------------------------------------------------------------------- # STEP 5: converting netcdf to grib format #----------------------------------------------------------------------------- -ln_vrfy -sf ${COMIN}/ozone.corrected.${PDY}.${cyc}z.nc . +ln -sf ${COMIN}/${cyc}/ozone.corrected.${PDY}.${cyc}z.nc . # cat >bias_cor.ini < filesize export XLFRTEOPTS="unit_vars=yes" @@ -408,11 +369,11 @@ EOF1 export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.227 done # Post Files to COMOUTwmo - cp_vrfy awpaqm.${cycle}.*o3-max-bc.227.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.*o3-max-bc.227.grib2 ${COMOUTwmo} # Distribute Data if [ "${SENDDBN_NTC}" = "TRUE" ] ; then @@ -423,13 +384,13 @@ EOF1 fi #------------------------------------- -rm_vrfy -rf tmpfile +rm -rf tmpfile fhr=01 while [ "${fhr}" -le "${FCST_LEN_HRS}" ]; do fhr3d=$( printf "%03d" "${fhr}" ) - cp_vrfy ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr3d}.${id_domain}.grib2 ${COMOUT} + cp ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr3d}.${id_domain}.grib2 ${COMOUT} # create GRIB file to convert to grid 227 then to GRIB2 for NDFD cat ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr3d}.${id_domain}.grib2 >> tmpfile @@ -453,13 +414,13 @@ newgrib2file2=${NET}.${cycle}.ave_8hr_o3_bc.227.grib2 grid227="lambert:265.0000:25.0000:25.0000 226.5410:1473:5079.000 12.1900:1025:5079.000" wgrib2 tmpfile.1hr -set_grib_type c3b -new_grid_winds earth -new_grid ${grid227} ${newgrib2file1} -cp_vrfy tmpfile.1hr ${COMOUT}/${NET}.${cycle}.ave_1hr_o3_bc.${id_domain}.grib2 -cp_vrfy ${NET}.${cycle}.ave_1hr_o3_bc.227.grib2 ${COMOUT} +cp tmpfile.1hr ${COMOUT}/${NET}.${cycle}.ave_1hr_o3_bc.${id_domain}.grib2 +cp ${NET}.${cycle}.ave_1hr_o3_bc.227.grib2 ${COMOUT} if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then wgrib2 tmpfile.8hr -set_grib_type c3b -new_grid_winds earth -new_grid ${grid227} ${newgrib2file2} - cp_vrfy tmpfile.8hr ${COMOUT}/${NET}.${cycle}.ave_8hr_o3_bc.${id_domain}.grib2 - cp_vrfy ${NET}.${cycle}.ave_8hr_o3_bc.227.grib2 ${COMOUT} + cp tmpfile.8hr ${COMOUT}/${NET}.${cycle}.ave_8hr_o3_bc.${id_domain}.grib2 + cp ${NET}.${cycle}.ave_8hr_o3_bc.227.grib2 ${COMOUT} fi if [ "${SENDDBN}" = "TRUE" ] ; then @@ -482,7 +443,7 @@ if [ "${cyc}" = "06" ] || 
[ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=grib2.${cycle}.awpcsozcon_aqm_${hr}-bc.temp - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_${hr}hr_o3_bc-awpozcon.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_${hr}hr_o3_bc-awpozcon.${cycle}.227 echo `ls -l grib2.${cycle}.awpcsozcon_aqm_${hr}-bc.temp | awk '{print $5} '` > filesize export XLFRTEOPTS="unit_vars=yes" @@ -490,7 +451,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.${hr}ho3-bc.227.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_${hr}hr_o3_bc-awpozcon.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_${hr}hr_o3_bc-awpozcon.${cycle}.227 # Create AWIPS GRIB data for dailly 1-hr and 8hr max ozone echo 0 > filesize @@ -499,7 +460,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=${NET}.${cycle}.max_${hr}hr_o3-bc.227.grib2.temp - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3_bc-maxi.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm-${hr}hro3_bc-maxi.${cycle}.227 echo `ls -l ${NET}.${cycle}.max_${hr}hr_o3-bc.227.grib2.temp | awk '{print $5} '` > filesize export XLFRTEOPTS="unit_vars=yes" @@ -507,11 +468,11 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3_bc-maxi.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm-${hr}hro3_bc-maxi.${cycle}.227 # Post Files to COMOUTwmo - cp_vrfy awpaqm.${cycle}.${hr}ho3-bc.227.grib2 ${COMOUTwmo} - cp_vrfy awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.${hr}ho3-bc.227.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 ${COMOUTwmo} # Distribute Data if [ "${SENDDBN}" = "TRUE" ]; then @@ -520,7 +481,6 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then fi done fi - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_bias_correction_pm25.sh b/scripts/exsrw_bias_correction_pm25.sh similarity index 68% rename from scripts/exregional_bias_correction_pm25.sh rename to scripts/exsrw_bias_correction_pm25.sh index 9503f744c9..ae1a2d6f65 100755 --- a/scripts/exregional_bias_correction_pm25.sh +++ b/scripts/exsrw_bias_correction_pm25.sh @@ -7,7 +7,7 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. ${USHsrw}/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +17,7 @@ source_config_for_task "cpl_aqm_parm|task_bias_correction_pm25" ${GLOBAL_VAR_DEF # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -103,13 +103,11 @@ fi # STEP 1: Retrieve AIRNOW observation data #----------------------------------------------------------------------------- -mkdir_vrfy -p "${DATA}/data" +mkdir -p "${DATA}/data" -# Retrieve real-time airnow data for the last three days. -# In the following for-loop, pdym stands for previous (m) day of the present day (PDY) -# in the NCO standards, i.e. 
PDYm1: 1day ago, PDYm2: 2days ago, PDYm3: 3days ago -for i_pdym in {1..3}; do - case $i_pdym in +# Retrieve real-time airnow data for the last three days + for ipdym in {1..3}; do + case $ipdym in 1) cvt_yyyy="${yyyy_m1}" cvt_yyyymm="${yyyymm_m1}" @@ -134,22 +132,21 @@ for i_pdym in {1..3}; do cvt_input_fp="${cvt_input_dir}/YYYY/YYYYMMDD/${cvt_input_fn}" cvt_output_fp="${cvt_output_dir}/YYYY/YYYYMMDD/${cvt_output_fn}" - mkdir_vrfy -p "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}" - mkdir_vrfy -p "${cvt_output_dir}/${cvt_yyyy}/${cvt_pdy}" - cp_vrfy ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}" - + mkdir -p "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}" + mkdir -p "${cvt_output_dir}/${cvt_yyyy}/${cvt_pdy}" + if [ "$(ls -A ${DCOMINairnow}/${cvt_pdy}/airnow)" ]; then + cp ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}" + else + message_warning="WARNING: airnow data missing; skipping date ${cvt_pdy}" + print_info_msg "${message_warning}" + fi + PREP_STEP eval ${RUN_CMD_SERIAL} ${EXECdir}/convert_airnow_csv ${cvt_input_fp} ${cvt_output_fp} ${cvt_pdy} ${cvt_pdy} ${REDIRECT_OUT_ERR} export err=$? - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_chk - else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to executable to run CONVERT_AIRNOW_CSV returned with nonzero exit code." - fi - fi POST_STEP -done + done #----------------------------------------------------------------------------- # STEP 2: Extracting PM2.5, O3, and met variables from CMAQ input and outputs @@ -158,7 +155,7 @@ done FCST_LEN_HRS=$( printf "%03d" ${FCST_LEN_HRS} ) ic=1 while [ $ic -lt 120 ]; do - if [ -s ${COMIN}/${NET}.${cycle}.chem_sfc.f${FCST_LEN_HRS}.nc ]; then + if [ -s ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.f${FCST_LEN_HRS}.nc ]; then echo "cycle ${cyc} post1 is done!" 
break else @@ -173,82 +170,70 @@ fi # remove any pre-existing ${NET}.${cycle}.chem_sfc/met_sfc.nc for 2-stage post processing DATA_grid="${DATA}/data/bcdata.${yyyymm}/grid" if [ -d "${DATA_grid}/${cyc}z/${PDY}" ]; then - rm_vrfy -rf "${DATA_grid}/${cyc}z/${PDY}" + rm -rf "${DATA_grid}/${cyc}z/${PDY}" fi -mkdir_vrfy -p "${DATA_grid}/${cyc}z/${PDY}" -ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY} -ln_vrfy -sf ${COMIN}/${NET}.${cycle}.met_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY} +mkdir -p "${DATA_grid}/${cyc}z/${PDY}" +ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY} +ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.met_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY} #----------------------------------------------------------------------- # STEP 3: Interpolating CMAQ PM2.5 into AIRNow sites #----------------------------------------------------------------------- -mkdir_vrfy -p ${DATA}/data/coords -mkdir_vrfy -p ${DATA}/data/site-lists.interp -mkdir_vrfy -p ${DATA}/out/pm25/${yyyy} -mkdir_vrfy -p ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy} +mkdir -p ${DATA}/data/coords +mkdir -p ${DATA}/data/site-lists.interp +mkdir -p ${DATA}/out/pm25/${yyyy} +mkdir -p ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy} -cp_vrfy ${PARMaqm_utils}/bias_correction/sites.valid.pm25.20230331.12z.list ${DATA}/data/site-lists.interp -cp_vrfy ${PARMaqm_utils}/bias_correction/aqm.t12z.chem_sfc.f000.nc ${DATA}/data/coords -cp_vrfy ${PARMaqm_utils}/bias_correction/config.interp.pm2.5.5-vars_${id_domain}.${cyc}z ${DATA} +cp ${PARMdir}/aqm_utils/bias_correction/sites.valid.pm25.20230331.12z.list ${DATA}/data/site-lists.interp +cp ${PARMdir}/aqm_utils/bias_correction/aqm.t12z.chem_sfc.f000.nc ${DATA}/data/coords +cp ${PARMdir}/aqm_utils/bias_correction/config.interp.pm2.5.5-vars_${id_domain}.${cyc}z ${DATA} PREP_STEP eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_interpolate config.interp.pm2.5.5-vars_${id_domain}.${cyc}z ${cyc}z ${PDY} ${PDY} ${REDIRECT_OUT_ERR} export err=$? -if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_chk -else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to executable to run CONVERT_AIRNOW_CSV returned with nonzero exit code." 
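# A minimal sketch of the bounded STEP 2 wait loop above; the polling
# interval and counter increment shown here are assumptions, not the
# operational values:
ic=1
while [ $ic -lt 120 ]; do
  if [ -s "${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.f${FCST_LEN_HRS}.nc" ]; then
    echo "cycle ${cyc} post1 is done!"
    break
  fi
  sleep 300           # assumed polling interval
  ic=$(( ic + 1 ))    # give up after 120 tries
done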
- fi -fi POST_STEP -cp_vrfy ${DATA}/out/pm25/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy} +cp ${DATA}/out/pm25/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy} if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then - mkdir_vrfy -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} - cp_vrfy ${DATA}/out/pm25/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} +mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} +cp ${DATA}/out/pm25/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} fi #----------------------------------------------------------------------- # STEP 4: Performing Bias Correction for PM2.5 #----------------------------------------------------------------------- -rm_vrfy -rf ${DATA}/data/bcdata* +rm -rf ${DATA}/data/bcdata* -ln_vrfy -sf ${COMINbicor}/bcdata* "${DATA}/data" +ln -sf ${COMINbicor}/bcdata* "${DATA}/data" -mkdir_vrfy -p ${DATA}/data/sites +mkdir -p ${DATA}/data/sites -cp_vrfy ${PARMaqm_utils}/bias_correction/config.pm2.5.bias_corr_${id_domain}.${cyc}z ${DATA} -cp_vrfy ${PARMaqm_utils}/bias_correction/site_blocking.pm2.5.2021.0427.2-sites.txt ${DATA} -cp_vrfy ${PARMaqm_utils}/bias_correction/bias_thresholds.pm2.5.2015.1030.32-sites.txt ${DATA} +cp ${PARMdir}/aqm_utils/bias_correction/config.pm2.5.bias_corr_${id_domain}.${cyc}z ${DATA} +cp ${PARMdir}/aqm_utils/bias_correction/site_blocking.pm2.5.2021.0427.2-sites.txt ${DATA} +cp ${PARMdir}/aqm_utils/bias_correction/bias_thresholds.pm2.5.2015.1030.32-sites.txt ${DATA} PREP_STEP eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_correct config.pm2.5.bias_corr_${id_domain}.${cyc}z ${cyc}z ${BC_STDAY} ${PDY} ${REDIRECT_OUT_ERR} export err=$? -if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_chk -else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to executable to run AQM_BIAS_CORRECT returned with nonzero exit code." - fi -fi POST_STEP -cp_vrfy $DATA/out/pm2.5.corrected* ${COMIN} +cp $DATA/out/pm2.5.corrected* ${COMOUT} if [ "${cyc}" = "12" ]; then - cp_vrfy ${DATA}/data/sites/sites.valid.pm25.${PDY}.${cyc}z.list ${DATA} + cp ${DATA}/data/sites/sites.valid.pm25.${PDY}.${cyc}z.list ${DATA} fi #------------------------------------------------------------------------ # STEP 5: converting netcdf to grib format #------------------------------------------------------------------------ -ln_vrfy -sf ${COMIN}/pm2.5.corrected.${PDY}.${cyc}z.nc . +ln -sf ${COMIN}/${cyc}/pm2.5.corrected.${PDY}.${cyc}z.nc . 
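# The netcdf-to-grib2 stage that follows drives its converter with a small
# ini file written as a heredoc, then hands files to tocgrib2super through
# FORT* unit variables; a minimal sketch in which the ini keys are
# hypothetical (only the FORT/tocgrib2super lines mirror this script):
cat > bias_cor.ini <<EOF
&control
 infile='pm2.5.corrected.${PDY}.${cyc}z.nc'
 outfile='${NET}.${cycle}.pm25_bc'
 id_gribdomain=${id_domain}
/
EOF
export XLFRTEOPTS="unit_vars=yes"   # map FORTnn variables to Fortran units
export FORT11=${NET}.${cycle}.max_1hr_pm25_bc.227.grib2
export FORT12="filesize"
export FORT51=${NET}.${cycle}.max_1hr_pm25_bc.227.grib2.temp
tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_max_1hr_pm25_bc.${cycle}.227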
# convert from netcdf to grib2 format cat >bias_cor.ini < filesize export XLFRTEOPTS="unit_vars=yes" @@ -412,17 +382,17 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.1hpm25-bc.227.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_pm25_bc.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_pm25_bc.${cycle}.227 #################################################### - rm_vrfy -f filesize + rm -f filesize echo 0 > filesize export XLFRTEOPTS="unit_vars=yes" export FORT11=${NET}.${cycle}.max_1hr_pm25_bc.227.grib2 export FORT12="filesize" export FORT31= export FORT51=${NET}.${cycle}.max_1hr_pm25_bc.227.grib2.temp - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_max_1hr_pm25_bc.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_max_1hr_pm25_bc.${cycle}.227 echo `ls -l ${NET}.${cycle}.max_1hr_pm25_bc.227.grib2.temp | awk '{print $5} '` > filesize export XLFRTEOPTS="unit_vars=yes" @@ -430,9 +400,9 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_max_1hr_pm25_bc.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_max_1hr_pm25_bc.${cycle}.227 - rm_vrfy -f filesize + rm -f filesize # daily_24hr_ave_PM2.5 echo 0 > filesize export XLFRTEOPTS="unit_vars=yes" @@ -440,7 +410,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=${NET}.${cycle}.ave_24hr_pm25_bc.227.grib2.temp - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_24hrpm25_bc_awp.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_24hrpm25_bc_awp.${cycle}.227 echo `ls -l ${NET}.${cycle}.ave_24hr_pm25_bc.227.grib2.temp | awk '{print $5} '` > filesize export XLFRTEOPTS="unit_vars=yes" @@ -448,21 +418,20 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_24hrpm25_bc_awp.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_24hrpm25_bc_awp.${cycle}.227 # Post Files to COMOUTwmo - cp_vrfy awpaqm.${cycle}.1hpm25-bc.227.grib2 ${COMOUTwmo} - cp_vrfy awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 ${COMOUTwmo} - cp_vrfy awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.1hpm25-bc.227.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 ${COMOUTwmo} # Distribute Data if [ "${SENDDBN_NTC}" = "TRUE" ] ; then - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.1hpm25-bc.227.grib2 - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1hpm25-bc.227.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 fi fi - # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_fire_emission.sh b/scripts/exsrw_fire_emission.sh new file 
mode 100755 index 0000000000..68178016e7 --- /dev/null +++ b/scripts/exsrw_fire_emission.sh @@ -0,0 +1,167 @@ +#!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# Source the variable definitions file and the bash utility functions. +# +#----------------------------------------------------------------------- +# +. ${USHsrw}/source_util_funcs.sh +source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP} +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# +{ save_shell_opts; set -xue; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " +======================================================================== +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + +This is the ex-script for the task that fetches fire emission +data files from disk or generates a model-ready RAVE emission file from raw +data files. 
+========================================================================" +# +#----------------------------------------------------------------------- +# +# Set up variables for call to retrieve_data.py +# +#----------------------------------------------------------------------- +# +YYYYMMDD=${FIRE_FILE_CDATE:0:8} +HH=${FIRE_FILE_CDATE:8:2} + +CDATE_mh1=`$NDATE -1 ${YYYYMMDD}${HH}` +yyyymmdd_mh1=${CDATE_mh1:0:8} +hh_mh1=${CDATE_mh1:8:2} + +#----------------------------------------------------------------------- +# +# Retrieve fire file to COMOUT +# +#----------------------------------------------------------------------- +# +aqm_fire_file_fn="${AQM_FIRE_FILE_PREFIX}_${YYYYMMDD}_t${HH}z${AQM_FIRE_FILE_SUFFIX}" + +# Check if the fire file exists in the designated directory +if [ -e "${COMINfire}/${aqm_fire_file_fn}" ]; then + cpreq "${COMINfire}/${aqm_fire_file_fn}" ${COMOUT} +else + # Copy raw data + for ihr in {0..23}; do + download_time=`$NDATE -$ihr ${yyyymmdd_mh1}${hh_mh1}` + FILE_curr="Hourly_Emissions_13km_${download_time}00_${download_time}00.nc" + FILE_13km="RAVE-HrlyEmiss-13km_v*_blend_s${download_time}00000_e${download_time}59590_c*.nc" + yyyymmdd_dn="${download_time:0:8}" + hh_dn="${download_time:8:2}" + missing_download_time=`$NDATE -24 ${yyyymmdd_dn}${hh_dn}` + yyyymmdd_dn_md1="${missing_download_time:0:8}" + FILE_13km_md1="RAVE-HrlyEmiss-13km_v*_blend_s${missing_download_time}00000_e${missing_download_time}59590_c*.nc" + if [ -s `ls ${COMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}` ] && [ $(stat -c %s `ls ${COMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}`) -gt 4000000 ]; then + cpreq -p ${COMINfire}/${yyyymmdd_dn}/rave/${FILE_13km} ${FILE_curr} + elif [ -s `ls ${COMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1}` ] && [ $(stat -c %s `ls ${COMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1}`) -gt 4000000 ]; then + echo "WARNING: ${FILE_13km} does not exist or is broken. Replacing with the file from the previous day ..." + cpreq -p ${COMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1} ${FILE_curr} + else + message_txt="Fire Emission RAW data does not exist or is broken: + FILE_13km_md1 = \"${FILE_13km_md1}\" + COMINfire = \"${COMINfire}\"" + + cpreq -p ${FIXaqm}/fire/Hourly_Emissions_13km_dummy.nc ${FILE_curr} + print_info_msg "WARNING: ${message_txt}. Replacing with the dummy file :: AQM RUN SOFT FAILED." + fi + done + + ncks -O -h --mk_rec_dmn time Hourly_Emissions_13km_${download_time}00_${download_time}00.nc temp.nc + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to NCKS returned with nonzero exit code." + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" + fi + + mv temp.nc Hourly_Emissions_13km_${download_time}00_${download_time}00.nc + + ncrcat -h Hourly_Emissions_13km_*.nc Hourly_Emissions_13km_${YYYYMMDD}0000_${YYYYMMDD}2300.t${HH}z.nc + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to NCRCAT returned with nonzero exit code." + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" + fi + + input_fire="${DATA}/Hourly_Emissions_13km_${YYYYMMDD}0000_${YYYYMMDD}2300.t${HH}z.nc" + output_fire="${DATA}/Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_new24.t${HH}z.nc" + + ${USHsrw}/aqm_utils_python/RAVE_remake.allspecies.aqmna13km.g793.py --date "${YYYYMMDD}" --cyc "${HH}" --input_fire "${input_fire}" --output_fire "${output_fire}" + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to python script \"RAVE_remake.allspecies.aqmna13km.g793.py\" returned with nonzero exit code." 
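# A minimal sketch of the record-dimension step above, with hypothetical
# file names: ncrcat concatenates only along a record (unlimited)
# dimension, so "time" is promoted first with --mk_rec_dmn (-O overwrites,
# -h keeps the history attribute clean):
ncks -O -h --mk_rec_dmn time hour01.nc tmp.nc && mv tmp.nc hour01.nc
ncks -O -h --mk_rec_dmn time hour02.nc tmp.nc && mv tmp.nc hour02.nc
ncrcat -h hour01.nc hour02.nc day_combined.nc   # stacked along time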
+ err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" + fi + + ncks --mk_rec_dmn Time Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_new24.t${HH}z.nc -o Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to NCKS returned with nonzero exit code." + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" + fi + + cpreq Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_1.nc + cpreq Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_2.nc + + ncrcat -O -D 2 Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_1.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_2.nc ${aqm_fire_file_fn} + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to NCRCAT returned with nonzero exit code." + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" + fi + + mv ${aqm_fire_file_fn} temp.nc + ncrename -v PM2.5,PM25 temp.nc temp1.nc + ncap2 -s 'where(Latitude > 30 && Latitude <=49 && land_cover == 1 ) PM25 = PM25 * 0.44444' temp1.nc temp2.nc + ncap2 -s 'where(Latitude <=30 && land_cover == 1 ) PM25 = PM25 * 0.8' temp2.nc temp3.nc + ncap2 -s 'where(Latitude <=49 && land_cover == 3 ) PM25 = PM25 * 1.11111' temp3.nc temp4.nc + ncap2 -s 'where(Latitude <=49 && land_cover == 4 ) PM25 = PM25 * 1.11111' temp4.nc temp5.nc + ncrename -v PM25,PM2.5 temp5.nc temp6.nc + mv temp6.nc ${aqm_fire_file_fn} + + # Copy the final fire emission file to data share directory + cpreq "${DATA}/${aqm_fire_file_fn}" ${COMOUT} +fi +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/function. +# +#----------------------------------------------------------------------- +# +{ restore_shell_opts; } > /dev/null 2>&1 diff --git a/scripts/exregional_nexus_emission.sh b/scripts/exsrw_nexus_emission.sh similarity index 63% rename from scripts/exregional_nexus_emission.sh rename to scripts/exsrw_nexus_emission.sh index d1153d95b7..7edd18ce42 100755 --- a/scripts/exregional_nexus_emission.sh +++ b/scripts/exsrw_nexus_emission.sh @@ -7,7 +7,7 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. ${USHsrw}/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +17,7 @@ source_config_for_task "cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -42,7 +42,7 @@ print_info_msg " Entering script: \"${scrfunc_fn}\" In directory: \"${scrfunc_dir}\" -This is the ex-script for the task that runs NEXUS. +This is the ex-script for the task that runs NEXUS EMISSION. 
========================================================================" # #----------------------------------------------------------------------- @@ -75,13 +75,12 @@ fi # #----------------------------------------------------------------------- # -# Move to the NEXUS working directory +# Create NEXUS input directory in working directory # #----------------------------------------------------------------------- # DATAinput="${DATA}/input" -mkdir_vrfy -p "$DATAinput" - +mkdir -p "$DATAinput" # #----------------------------------------------------------------------- # @@ -90,19 +89,13 @@ mkdir_vrfy -p "$DATAinput" #----------------------------------------------------------------------- # USE_GFS_SFC="FALSE" -if [ "${RUN_ENVIR}" = "nco" ]; then - GFS_SFC_INPUT="${DATAROOT}/nexus_gfs_sfc.${share_pid}" -else - GFS_SFC_INPUT="${COMIN}/GFS_SFC" -fi - +GFS_SFC_INPUT="${DATA_SHARE}" if [ -d "${GFS_SFC_INPUT}" ]; then - if [ "$(ls -A ${GFS_SFC_INPUT})" ]; then + if [ "$(ls -A ${GFS_SFC_INPUT}/gfs*.nc)" ]; then ln -sf "${GFS_SFC_INPUT}" "GFS_SFC" USE_GFS_SFC="TRUE" fi fi - # #----------------------------------------------------------------------- # @@ -110,14 +103,12 @@ fi # #----------------------------------------------------------------------- # -cp_vrfy ${EXECdir}/nexus ${DATA} -cp_vrfy ${NEXUS_FIX_DIR}/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc - +cpreq ${FIXaqm}/nexus/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc if [ "${USE_GFS_SFC}" = "TRUE" ]; then - cp_vrfy ${ARL_NEXUS_DIR}/config/cmaq_gfs_megan/*.rc ${DATA} + cpreq ${PARMsrw}/nexus_config/cmaq_gfs_megan/*.rc ${DATA} else - cp_vrfy ${ARL_NEXUS_DIR}/config/cmaq/*.rc ${DATA} + cpreq ${PARMsrw}/nexus_config/cmaq/*.rc ${DATA} fi # #----------------------------------------------------------------------- @@ -127,10 +118,10 @@ fi # #----------------------------------------------------------------------- # -mm="${PDY:4:2}" -dd="${PDY:6:2}" -hh="${cyc}" -yyyymmdd="${PDY}" +MM="${PDY:4:2}" +DD="${PDY:6:2}" +HH="${cyc}" +YYYYMMDD="${PDY}" NUM_SPLIT_NEXUS=$( printf "%02d" ${NUM_SPLIT_NEXUS} ) @@ -141,28 +132,33 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then fi if [ "${NUM_SPLIT_NEXUS}" = "01" ]; then - start_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC" "+%Y%m%d%H" ) - end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${FCST_LEN_HRS} hours" "+%Y%m%d%H" ) + start_date="${YYYYMMDD}${HH}" + end_date=`$NDATE +${FCST_LEN_HRS} ${YYYYMMDD}${HH}` else len_per_split=$(( FCST_LEN_HRS / NUM_SPLIT_NEXUS )) nsptp=$(( nspt+1 )) # Compute start and end dates for nexus split option start_del_hr=$(( len_per_split * nspt )) - start_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${start_del_hr} hours " "+%Y%m%d%H" ) + start_date=`$NDATE +${start_del_hr} ${YYYYMMDD}${HH}` if [ "${nsptp}" = "${NUM_SPLIT_NEXUS}" ];then - end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + $(expr $FCST_LEN_HRS + 1) hours" "+%Y%m%d%H" ) + end_date=`$NDATE +$(expr $FCST_LEN_HRS + 1) ${YYYYMMDD}${HH}` else end_del_hr=$(( len_per_split * nsptp )) - end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + $(expr $end_del_hr + 1) hours" "+%Y%m%d%H" ) + end_del_hr1=$(( $end_del_hr + 1 )) + end_date=`$NDATE +${end_del_hr1} ${YYYYMMDD}${HH}` fi fi # -####################################################################### +#---------------------------------------------------------------------- +# # This will be the section to set the datasets used in $workdir/NEXUS_Config.rc # All Datasets in that file need to be placed here as it will link the files # necessary to that 
folder. In the future this will be done by a get_nexus_input # script +# +#---------------------------------------------------------------------- +# NEI2016="TRUE" TIMEZONES="TRUE" CEDS="TRUE" @@ -173,148 +169,138 @@ NOAAGMD="TRUE" SOA="TRUE" EDGAR="TRUE" MEGAN="TRUE" -MODIS_XLAI="TRUE" +MODIS_XLAI="FALSE" OLSON_MAP="TRUE" Yuan_XLAI="TRUE" GEOS="TRUE" AnnualScalar="TRUE" - -NEXUS_INPUT_BASE_DIR=${NEXUS_INPUT_DIR} -######################################################################## - +OFFLINE_SOILNOX="TRUE" # #---------------------------------------------------------------------- # # modify time configuration file # -python3 ${ARL_NEXUS_DIR}/utils/python/nexus_time_parser.py -f ${DATA}/HEMCO_sa_Time.rc -s $start_date -e $end_date +#---------------------------------------------------------------------- +# +${USHsrw}/nexus_utils/python/nexus_time_parser.py -f ${DATA}/HEMCO_sa_Time.rc -s $start_date -e $end_date export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"nexus_time_parser.py\" failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi - # #--------------------------------------------------------------------- # # set the root directory to the temporary directory # -python3 ${ARL_NEXUS_DIR}/utils/python/nexus_root_parser.py -f ${DATA}/NEXUS_Config.rc -d ${DATAinput} +#---------------------------------------------------------------------- +# +${USHsrw}/nexus_utils/python/nexus_root_parser.py -f ${DATA}/NEXUS_Config.rc -d ${DATAinput} export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"nexus_root_parser.py\" failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi - # #---------------------------------------------------------------------- +# # Get all the files needed (TEMPORARILY JUST COPY FROM THE DIRECTORY) # +#---------------------------------------------------------------------- +# if [ "${NEI2016}" = "TRUE" ]; then #NEI2016 - mkdir_vrfy -p ${DATAinput}/NEI2016v1 - mkdir_vrfy -p ${DATAinput}/NEI2016v1/v2022-07 - mkdir_vrfy -p ${DATAinput}/NEI2016v1/v2022-07/${mm} - python3 ${ARL_NEXUS_DIR}/utils/python/nexus_nei2016_linker.py --src_dir ${NEXUS_INPUT_BASE_DIR} --date ${yyyymmdd} --work_dir ${DATAinput} -v "v2022-07" + mkdir -p ${DATAinput}/NEI2016v1 + mkdir -p ${DATAinput}/NEI2016v1/v2022-07 + mkdir -p ${DATAinput}/NEI2016v1/v2022-07/${MM} + ${USHsrw}/nexus_utils/python/nexus_nei2016_linker.py --src_dir ${FIXemis} --date ${YYYYMMDD} --work_dir ${DATAinput} -v "v2022-07" export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"nexus_nei2016_linker.py\" failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi - python3 ${ARL_NEXUS_DIR}/utils/python/nexus_nei2016_control_tilefix.py -f ${DATA}/NEXUS_Config.rc -t ${DATA}/HEMCO_sa_Time.rc # -d ${yyyymmdd} + ${USHsrw}/nexus_utils/python/nexus_nei2016_control_tilefix.py -f ${DATA}/NEXUS_Config.rc -t ${DATA}/HEMCO_sa_Time.rc # -d ${yyyymmdd} export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"nexus_nei2016_control_tilefix.py\" failed." 
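# A minimal sketch of the NDATE arithmetic behind the split start/end dates
# computed earlier in this script (NDATE is prod_util's ndate utility: a
# signed hour offset plus a YYYYMMDDHH stamp; the values are illustrative):
YYYYMMDD=20240101; HH=06
FCST_LEN_HRS=24; NUM_SPLIT_NEXUS=2; nspt=1
len_per_split=$(( FCST_LEN_HRS / NUM_SPLIT_NEXUS ))       # 12-hour pieces
start_del_hr=$(( len_per_split * nspt ))                  # 12
start_date=`$NDATE +${start_del_hr} ${YYYYMMDD}${HH}`     # 2024010118
end_date=`$NDATE +$(( FCST_LEN_HRS + 1 )) ${YYYYMMDD}${HH}`   # last split: 2024010207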
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi fi if [ "${TIMEZONES}" = "TRUE" ]; then # TIME ZONES - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/TIMEZONES ${DATAinput}/ + ln -sf ${FIXemis}/TIMEZONES ${DATAinput} fi if [ "${MASKS}" = "TRUE" ]; then # MASKS - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/MASKS ${DATAinput}/ + ln -sf ${FIXemis}/MASKS ${DATAinput} fi if [ "${CEDS}" = "TRUE" ]; then #CEDS - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/CEDS ${DATAinput}/ + ln -sf ${FIXemis}/CEDS ${DATAinput} fi if [ "${HTAP2010}" = "TRUE" ]; then #CEDS2014 - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/HTAP ${DATAinput}/ + ln -sf ${FIXemis}/HTAP ${DATAinput} fi if [ "${OMIHTAP}" = "TRUE" ]; then #CEDS2014 - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/OMI-HTAP_2019 ${DATAinput}/ + ln -sf ${FIXemis}/OMI-HTAP_2019 ${DATAinput} fi if [ "${NOAAGMD}" = "TRUE" ]; then #NOAA_GMD - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/NOAA_GMD ${DATAinput}/ + ln -sf ${FIXemis}/NOAA_GMD ${DATAinput} fi if [ "${SOA}" = "TRUE" ]; then #SOA - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/SOA ${DATAinput}/ + ln -sf ${FIXemis}/SOA ${DATAinput} fi if [ "${EDGAR}" = "TRUE" ]; then #EDGARv42 - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/EDGARv42 ${DATAinput}/ + ln -sf ${FIXemis}/EDGARv42 ${DATAinput} fi if [ "${MEGAN}" = "TRUE" ]; then #MEGAN - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/MEGAN ${DATAinput}/ + ln -sf ${FIXemis}/MEGAN ${DATAinput} fi if [ "${OLSON_MAP}" = "TRUE" ]; then #OLSON_MAP - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/OLSON_MAP ${DATAinput}/ + ln -sf ${FIXemis}/OLSON_MAP ${DATAinput} fi if [ "${Yuan_XLAI}" = "TRUE" ]; then #Yuan_XLAI - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/Yuan_XLAI ${DATAinput}/ + ln -sf ${FIXemis}/Yuan_XLAI ${DATAinput} fi if [ "${GEOS}" = "TRUE" ]; then #GEOS - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/GEOS_0.5x0.625 ${DATAinput}/ + ln -sf ${FIXemis}/GEOS_0.5x0.625 ${DATAinput} fi if [ "${AnnualScalar}" = "TRUE" ]; then #ANNUAL_SCALAR - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/AnnualScalar ${DATAinput}/ + ln -sf ${FIXemis}/AnnualScalar ${DATAinput} fi if [ "${MODIS_XLAI}" = "TRUE" ]; then #MODIS_XLAI - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/MODIS_XLAI ${DATAinput}/ + ln -sf ${FIXemis}/MODIS_XLAI ${DATAinput} +fi + +if [ "${OFFLINE_SOILNOX}" = "TRUE" ]; then #OFFLINE_SOILNOX + ln -sf ${FIXemis}/OFFLINE_SOILNOX ${DATAinput} fi if [ "${USE_GFS_SFC}" = "TRUE" ]; then # GFS INPUT - mkdir_vrfy -p ${DATAinput}/GFS_SFC - python3 ${ARL_NEXUS_DIR}/utils/python/nexus_gfs_bio.py -i ${DATA}/GFS_SFC/gfs.t??z.sfcf???.nc -o ${DATA}/GFS_SFC_MEGAN_INPUT.nc + mkdir -p ${DATAinput}/GFS_SFC + ${USHsrw}/nexus_utils/python/nexus_gfs_bio.py -i ${DATA}/GFS_SFC/gfs.t??z.sfcf???.nc -o ${DATA}/GFS_SFC_MEGAN_INPUT.nc export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"nexus_gfs_bio.py\" failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi fi - # #---------------------------------------------------------------------- # @@ -322,18 +308,14 @@ fi # #----------------------------------------------------------------------- # -PREP_STEP -eval ${RUN_CMD_NEXUS} ${EXECdir}/nexus -c NEXUS_Config.rc -r grid_spec.nc -o NEXUS_Expt_split.nc ${REDIRECT_OUT_ERR} -export err=$? 
-if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_chk -else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to execute nexus standalone for the FV3LAM failed." - fi -fi -POST_STEP +export pgm="nexus" +. prep_step +eval ${RUN_CMD_NEXUS} ${EXECdir}/$pgm -c NEXUS_Config.rc -r grid_spec.nc -o NEXUS_Expt_split.nc >>$pgmout 2>${DATA}/errfile +export err=$?; err_chk +if [ $err -ne 0 ]; then + print_err_msg_exit "Call to execute nexus failed." +fi # #----------------------------------------------------------------------- # @@ -341,15 +323,12 @@ POST_STEP # #----------------------------------------------------------------------- # -python3 ${ARL_NEXUS_DIR}/utils/python/make_nexus_output_pretty.py --src ${DATA}/NEXUS_Expt_split.nc --grid ${DATA}/grid_spec.nc -o ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc -t ${DATA}/HEMCO_sa_Time.rc +${USHsrw}/nexus_utils/python/make_nexus_output_pretty.py --src ${DATA}/NEXUS_Expt_split.nc --grid ${DATA}/grid_spec.nc -o ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc -t ${DATA}/HEMCO_sa_Time.rc export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"make_nexus_output_pretty.py\" failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "wcoss2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi # #----------------------------------------------------------------------- diff --git a/scripts/exregional_nexus_gfs_sfc.sh b/scripts/exsrw_nexus_gfs_sfc.sh similarity index 68% rename from scripts/exregional_nexus_gfs_sfc.sh rename to scripts/exsrw_nexus_gfs_sfc.sh index c34d2c30ae..103842d46f 100755 --- a/scripts/exregional_nexus_gfs_sfc.sh +++ b/scripts/exsrw_nexus_gfs_sfc.sh @@ -7,7 +7,7 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. ${USHsrw}/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +17,7 @@ source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -52,10 +52,10 @@ data files from disk or HPSS. 
# #----------------------------------------------------------------------- # -yyyymmdd=${GFS_SFC_CDATE:0:8} -yyyymm=${GFS_SFC_CDATE:0:6} -yyyy=${GFS_SFC_CDATE:0:4} -hh=${GFS_SFC_CDATE:8:2} +YYYYMMDD=${GFS_SFC_CDATE:0:8} +YYYYMM=${GFS_SFC_CDATE:0:6} +YYYY=${GFS_SFC_CDATE:0:4} +HH=${GFS_SFC_CDATE:8:2} if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then cyc_mod=$(( ${cyc} - ${DATE_FIRST_CYCL:8:2} )) @@ -70,8 +70,8 @@ fcst_len_hrs_offset=$(( FCST_LEN_HRS + TIME_OFFSET_HRS )) # #----------------------------------------------------------------------- # -GFS_SFC_TAR_DIR="${NEXUS_GFS_SFC_ARCHV_DIR}/rh${yyyy}/${yyyymm}/${yyyymmdd}" -GFS_SFC_TAR_SUB_DIR="gfs.${yyyymmdd}/${hh}/atmos" +GFS_SFC_TAR_DIR="${NEXUS_GFS_SFC_ARCHV_DIR}/rh${YYYY}/${YYYYMM}/${YYYYMMDD}" +GFS_SFC_TAR_SUB_DIR="gfs.${YYYYMMDD}/${HH}/atmos" if [ "${DO_REAL_TIME}" = "TRUE" ]; then GFS_SFC_LOCAL_DIR="${COMINgfs}/${GFS_SFC_TAR_SUB_DIR}" @@ -83,40 +83,28 @@ GFS_SFC_DATA_INTVL="3" # copy files from local directory if [ -d ${GFS_SFC_LOCAL_DIR} ]; then - gfs_sfc_fn="gfs.t${hh}z.sfcanl.nc" + gfs_sfc_fn="gfs.t${HH}z.sfcanl.nc" - relative_link_flag="FALSE" gfs_sfc_fp="${GFS_SFC_LOCAL_DIR}/${gfs_sfc_fn}" - create_symlink_to_file target="${gfs_sfc_fp}" symlink="${gfs_sfc_fn}" \ - relative="${relative_link_flag}" + ln -sf ${gfs_sfc_fp} ${DATA_SHARE}/${gfs_sfc_fn} for fhr in $(seq -f "%03g" 0 ${GFS_SFC_DATA_INTVL} ${fcst_len_hrs_offset}); do - gfs_sfc_fn="gfs.t${hh}z.sfcf${fhr}.nc" + gfs_sfc_fn="gfs.t${HH}z.sfcf${fhr}.nc" if [ -e "${GFS_SFC_LOCAL_DIR}/${gfs_sfc_fn}" ]; then gfs_sfc_fp="${GFS_SFC_LOCAL_DIR}/${gfs_sfc_fn}" - create_symlink_to_file target="${gfs_sfc_fp}" symlink="${gfs_sfc_fn}" \ - relative="${relative_link_flag}" + ln -nsf ${gfs_sfc_fp} ${DATA_SHARE}/${gfs_sfc_fn} else message_txt="SFC file for nexus emission for \"${cycle}\" does not exist in the directory: GFS_SFC_LOCAL_DIR = \"${GFS_SFC_LOCAL_DIR}\" gfs_sfc_fn = \"${gfs_sfc_fn}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - message_warning="WARNING: ${message_txt}" - print_info_msg "${message_warning}" - if [ ! 
-z "${maillist}" ]; then - echo "${message_warning}" | mail.py $maillist - fi - else - print_err_msg_exit "${message_txt}" - fi + print_err_msg_exit "${message_txt}" fi - done - + done # retrieve files from HPSS else - if [ "${yyyymmdd}" -lt "20220627" ]; then + if [ "${YYYYMMDD}" -lt "20220627" ]; then GFS_SFC_TAR_FN_VER="prod" - elif [ "${yyyymmdd}" -lt "20221129" ]; then + elif [ "${YYYYMMDD}" -lt "20221129" ]; then GFS_SFC_TAR_FN_VER="v16.2" else GFS_SFC_TAR_FN_VER="v16.3" @@ -126,63 +114,51 @@ else GFS_SFC_TAR_FN_SUFFIX_B="gfs_ncb.tar" # Check if the sfcanl file exists in the staging directory - gfs_sfc_tar_fn="${GFS_SFC_TAR_FN_PREFIX}.${yyyymmdd}_${hh}.${GFS_SFC_TAR_FN_SUFFIX_A}" + gfs_sfc_tar_fn="${GFS_SFC_TAR_FN_PREFIX}.${YYYYMMDD}_${HH}.${GFS_SFC_TAR_FN_SUFFIX_A}" gfs_sfc_tar_fp="${GFS_SFC_TAR_DIR}/${gfs_sfc_tar_fn}" - gfs_sfc_fns=("gfs.t${hh}z.sfcanl.nc") - gfs_sfc_fps="./${GFS_SFC_TAR_SUB_DIR}/gfs.t${hh}z.sfcanl.nc" + gfs_sfc_fns=("gfs.t${HH}z.sfcanl.nc") + gfs_sfc_fps="./${GFS_SFC_TAR_SUB_DIR}/gfs.t${HH}z.sfcanl.nc" if [ "${fcst_len_hrs_offset}" -lt "40" ]; then ARCHV_LEN_HRS="${fcst_len_hrs_offset}" else ARCHV_LEN_HRS="39" fi for fhr in $(seq -f "%03g" 0 ${GFS_SFC_DATA_INTVL} ${ARCHV_LEN_HRS}); do - gfs_sfc_fns+="gfs.t${hh}z.sfcf${fhr}.nc" - gfs_sfc_fps+=" ./${GFS_SFC_TAR_SUB_DIR}/gfs.t${hh}z.sfcf${fhr}.nc" + gfs_sfc_fns+="gfs.t${HH}z.sfcf${fhr}.nc" + gfs_sfc_fps+=" ./${GFS_SFC_TAR_SUB_DIR}/gfs.t${HH}z.sfcf${fhr}.nc" done # Retrieve data from A file up to fcst_len_hrs_offset=39 htar -tvf ${gfs_sfc_tar_fp} - PREP_STEP - htar -xvf ${gfs_sfc_tar_fp} ${gfs_sfc_fps} ${REDIRECT_OUT_ERR} + htar -xvf ${gfs_sfc_tar_fp} ${gfs_sfc_fps} export err=$? if [ $err -ne 0 ]; then message_txt="htar file reading operation (\"htar -xvf ...\") failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + print_err_msg_exit "${message_txt}" fi - POST_STEP # Retireve data from B file when fcst_len_hrs_offset>=40 if [ "${fcst_len_hrs_offset}" -ge "40" ]; then - gfs_sfc_tar_fn="${GFS_SFC_TAR_FN_PREFIX}.${yyyymmdd}_${hh}.${GFS_SFC_TAR_FN_SUFFIX_B}" + gfs_sfc_tar_fn="${GFS_SFC_TAR_FN_PREFIX}.${YYYYMMDD}_${HH}.${GFS_SFC_TAR_FN_SUFFIX_B}" gfs_sfc_tar_fp="${GFS_SFC_TAR_DIR}/${gfs_sfc_tar_fn}" gfs_sfc_fns=() gfs_sfc_fps="" for fhr in $(seq -f "%03g" 42 ${GFS_SFC_DATA_INTVL} ${fcst_len_hrs_offset}); do - gfs_sfc_fns+="gfs.t${hh}z.sfcf${fhr}.nc" - gfs_sfc_fps+=" ./${GFS_SFC_TAR_SUB_DIR}/gfs.t${hh}z.sfcf${fhr}.nc" + gfs_sfc_fns+="gfs.t${HH}z.sfcf${fhr}.nc" + gfs_sfc_fps+=" ./${GFS_SFC_TAR_SUB_DIR}/gfs.t${HH}z.sfcf${fhr}.nc" done htar -tvf ${gfs_sfc_tar_fp} - PREP_STEP - htar -xvf ${gfs_sfc_tar_fp} ${gfs_sfc_fps} ${REDIRECT_OUT_ERR} + htar -xvf ${gfs_sfc_tar_fp} ${gfs_sfc_fps} export err=$? if [ $err -ne 0 ]; then message_txt="htar file reading operation (\"htar -xvf ...\") failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + print_err_msg_exit "${message_txt}" fi - POST_STEP fi # Link retrieved files to staging directory - ln_vrfy -sf ${GFS_SFC_TAR_SUB_DIR}/gfs.*.nc . 
+ ln -sf ${DATA}/${GFS_SFC_TAR_SUB_DIR}/gfs.*.nc ${DATA_SHARE} +fi -fi # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_nexus_post_split.sh b/scripts/exsrw_nexus_post_split.sh similarity index 73% rename from scripts/exregional_nexus_post_split.sh rename to scripts/exsrw_nexus_post_split.sh index 390e0dcce6..3b83dee523 100755 --- a/scripts/exregional_nexus_post_split.sh +++ b/scripts/exsrw_nexus_post_split.sh @@ -7,7 +7,7 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. ${USHsrw}/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +17,7 @@ source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_F # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; sex -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -42,7 +42,7 @@ print_info_msg " Entering script: \"${scrfunc_fn}\" In directory: \"${scrfunc_dir}\" -This is the ex-script for the task that runs NEXUS. +This is the ex-script for the task that runs NEXUS POST SPLIT. ========================================================================" # #----------------------------------------------------------------------- @@ -53,10 +53,10 @@ This is the ex-script for the task that runs NEXUS. # eval ${PRE_TASK_CMDS} -mm="${PDY:4:2}" -dd="${PDY:6:2}" -hh="${cyc}" -yyyymmdd="${PDY}" +YYYYMMDD="${PDY}" +MM="${PDY:4:2}" +DD="${PDY:6:2}" +HH="${cyc}" NUM_SPLIT_NEXUS=$( printf "%02d" ${NUM_SPLIT_NEXUS} ) @@ -65,9 +65,8 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then CYCLE_IDX=$(( ${cyc_mod} / ${INCR_CYCL_FREQ} )) FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi -start_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC" "+%Y%m%d%H" ) -end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${FCST_LEN_HRS} hours" "+%Y%m%d%H" ) - +start_date=${YYYYMMDD}${HH} +end_date=`$NDATE +${FCST_LEN_HRS} ${YYYYMMDD}${HH}` # #----------------------------------------------------------------------- # @@ -75,25 +74,21 @@ end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${FCST_LEN_HRS} hou # #----------------------------------------------------------------------- # -cp_vrfy ${ARL_NEXUS_DIR}/config/cmaq/HEMCO_sa_Time.rc ${DATA}/HEMCO_sa_Time.rc +cpreq ${PARMsrw}/nexus_config/cmaq/HEMCO_sa_Time.rc ${DATA}/HEMCO_sa_Time.rc +cpreq ${FIXaqm}/nexus/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc -cp_vrfy ${NEXUS_FIX_DIR}/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc if [ "${NUM_SPLIT_NEXUS}" = "01" ]; then nspt="00" - cp_vrfy ${COMIN}/NEXUS/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc ${DATA}/NEXUS_Expt_combined.nc + cpreq ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc ${DATA}/NEXUS_Expt_combined.nc else - python3 ${ARL_NEXUS_DIR}/utils/python/concatenate_nexus_post_split.py "${COMIN}/NEXUS/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.*.nc" "${DATA}/NEXUS_Expt_combined.nc" + ${USHsrw}/nexus_utils/python/concatenate_nexus_post_split.py "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.*.nc" "${DATA}/NEXUS_Expt_combined.nc" export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"concatenate_nexus_post_split.py\" failed." 
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi fi - # #----------------------------------------------------------------------- # @@ -101,17 +96,13 @@ fi # #----------------------------------------------------------------------- # -python3 ${ARL_NEXUS_DIR}/utils/combine_ant_bio.py "${DATA}/NEXUS_Expt_combined.nc" ${DATA}/NEXUS_Expt.nc +${USHsrw}/nexus_utils/combine_ant_bio.py "${DATA}/NEXUS_Expt_combined.nc" ${DATA}/NEXUS_Expt.nc export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"NEXUS_Expt_pretty.py\" failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi - # #----------------------------------------------------------------------- # @@ -119,7 +110,7 @@ fi # #----------------------------------------------------------------------- # -mv_vrfy ${DATA}/NEXUS_Expt.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc +mv ${DATA}/NEXUS_Expt.nc ${COMOUT}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc # # Print message indicating successful completion of script. # diff --git a/scripts/exregional_point_source.sh b/scripts/exsrw_point_source.sh similarity index 83% rename from scripts/exregional_point_source.sh rename to scripts/exsrw_point_source.sh index aeec8f3925..7acbc946f7 100755 --- a/scripts/exregional_point_source.sh +++ b/scripts/exsrw_point_source.sh @@ -7,7 +7,7 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. ${USHsrw}/source_util_funcs.sh source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +17,7 @@ source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_V # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -59,16 +59,15 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi nstep=$(( FCST_LEN_HRS+1 )) -yyyymmddhh="${PDY}${cyc}" - +YYYYMMDDHH="${PDY}${cyc}" # #----------------------------------------------------------------------- # -# Set the directories for CONUS/HI/AK +# Path to the point source data files # #----------------------------------------------------------------------- # -PT_SRC_PRECOMB="${DCOMINpt_src}" +PT_SRC_PRECOMB="${FIXemis}/${PT_SRC_SUBDIR}" # #----------------------------------------------------------------------- # @@ -76,22 +75,17 @@ PT_SRC_PRECOMB="${DCOMINpt_src}" # #----------------------------------------------------------------------- # -if [ ! -s "${DATA}/pt-${yyyymmddhh}.nc" ]; then - python3 ${HOMEdir}/sorc/AQM-utils/python_utils/stack-pt-merge.py -s ${yyyymmddhh} -n ${nstep} -i ${PT_SRC_PRECOMB} +if [ ! -s "${DATA}/pt-${YYYYMMDDHH}.nc" ]; then + ${USHsrw}/aqm_utils_python/stack-pt-merge.py -s ${YYYYMMDDHH} -n ${nstep} -i ${PT_SRC_PRECOMB} export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"stack-pt-merge.py\" failed." 
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi fi - # Move to COMIN -mv_vrfy ${DATA}/pt-${yyyymmddhh}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.PT.nc - +mv ${DATA}/pt-${YYYYMMDDHH}.nc ${COMOUT}/${NET}.${cycle}${dot_ensmem}.PT.nc # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_post_stat_o3.sh b/scripts/exsrw_post_stat_o3.sh similarity index 81% rename from scripts/exregional_post_stat_o3.sh rename to scripts/exsrw_post_stat_o3.sh index 94306d7336..6fa1db7f8f 100755 --- a/scripts/exregional_post_stat_o3.sh +++ b/scripts/exsrw_post_stat_o3.sh @@ -7,7 +7,7 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. ${USHsrw}/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +17,7 @@ source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_V # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -74,7 +74,7 @@ if [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then id_domain=793 fi -ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.nc . +ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.nc . # cat >aqm_post.ini < filesize export XLFRTEOPTS="unit_vars=yes" @@ -145,18 +139,18 @@ for grid in 227 196 198;do export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.${hr}ho3.${grid}.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_${hr}hr_o3-awpozcon.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_${hr}hr_o3-awpozcon.${cycle}.${grid} done for var in 1ho3 8ho3;do - cp_vrfy ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT} - cp_vrfy ${DATA}/awpaqm.${cycle}.${var}*grib2 ${COMOUTwmo} + cp ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT} + cp ${DATA}/awpaqm.${cycle}.${var}*grib2 ${COMOUTwmo} done for var in awpozcon;do - cp_vrfy ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT} + cp ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT} done else for var in 1ho3 awpozcon;do - cp_vrfy ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT} + cp ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT} done fi done @@ -166,7 +160,7 @@ done #------------------------------------------------------------ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then - ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.nc a.nc + ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.nc a.nc export chk=1 export chk1=1 @@ -185,10 +179,10 @@ EOF1 ## 06z needs b.nc to find current day output from 04Z to 06Z if [ "${cyc}" = "06" ]; then - if [ -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then - ln_vrfy -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc + if [ -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc ]; then + ln -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc b.nc elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc + ln -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc chk=0 else flag_run_bicor_max=no @@ -197,20 +191,20 @@ EOF1 if [ "${cyc}" = "12" ]; then ## 12z needs b.nc to find current day output from 04Z to 06Z - if [ -s 
${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then - ln_vrfy -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc + if [ -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc ]; then + ln -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc b.nc elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc + ln -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc chk=0 else flag_run_bicor_max=no fi ## 12z needs c.nc to find current day output from 07Z to 12z - if [ -s ${COMIN}/../06/${NET}.t06z.chem_sfc.nc ]; then - ln_vrfy -s ${COMIN}/../06/${NET}.t06z.chem_sfc.nc c.nc + if [ -s ${COMIN}/06/${NET}.t06z.chem_sfc.nc ]; then + ln -s ${COMIN}/06/${NET}.t06z.chem_sfc.nc c.nc elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc c.nc + ln -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc c.nc chk1=0 else flag_run_bicor_max=no @@ -220,13 +214,7 @@ EOF1 PREP_STEP eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} export err=$? - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_chk - else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to executable to run AQM_POST_MAXI_GRIB2 returned with nonzero exit code." - fi - fi POST_STEP # split into max_1h and max_8h files and copy to grib227 @@ -234,7 +222,6 @@ EOF1 wgrib2 aqm-maxi.${id_domain}.grib2 |grep "OZMAX8" | wgrib2 -i aqm-maxi.${id_domain}.grib2 -grib ${NET}.${cycle}.max_8hr_o3.${id_domain}.grib2 grid227="lambert:265.0000:25.0000:25.0000 226.5410:1473:5079.000 12.1900:1025:5079.000" - #export grid148="lambert:263.0000:33.0000:45.0000 239.3720:442:12000.000 21.8210:265:12000.000" grid196="mercator:20.0000 198.4750:321:2500.000:206.1310 18.0730:255:2500.000:23.0880" grid198="nps:210.0000:60.0000 181.4290:825:5953.000 40.5300:553:5953.000" @@ -243,7 +230,7 @@ EOF1 wgrib2 ${NET}.${cycle}.max_8hr_o3.${id_domain}.grib2 -set_grib_type c3b -new_grid_winds earth -new_grid ${!gg} ${NET}.${cycle}.max_8hr_o3.${grid}.grib2 wgrib2 ${NET}.${cycle}.max_1hr_o3.${id_domain}.grib2 -set_grib_type c3b -new_grid_winds earth -new_grid ${!gg} ${NET}.${cycle}.max_1hr_o3.${grid}.grib2 - cp_vrfy ${DATA}/${NET}.${cycle}.max_*hr_o3.*.grib2 ${COMOUT} + cp ${DATA}/${NET}.${cycle}.max_*hr_o3.*.grib2 ${COMOUT} if [ "$SENDDBN" = "TRUE" ]; then ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_1hr_o3.${grid}.grib2 ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_8hr_o3.${grid}.grib2 @@ -257,24 +244,23 @@ EOF1 export FORT12="filesize" export FORT31= export FORT51=aqm-${hr}hro3-maxi.${grid}.grib2.temp - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.${grid} echo `ls -l aqm-${hr}hro3-maxi.${grid}.grib2.temp | awk '{print $5} '` > filesize export XLFRTEOPTS="unit_vars=yes" export FORT11=aqm-${hr}hro3-maxi.${grid}.grib2.temp export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.${hr}ho3-max.${grid}.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.${grid} done - cp_vrfy awpaqm.${cycle}.*o3-max.${grid}.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.*o3-max.${grid}.grib2 ${COMOUTwmo} if [ "${SENDDBN_NTC}" = "TRUE" ]; then ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1ho3-max.${grid}.grib2 ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} 
${COMOUTwmo}/awpaqm.${cycle}.8ho3-max.${grid}.grib2 fi done fi - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_post_stat_pm25.sh b/scripts/exsrw_post_stat_pm25.sh similarity index 79% rename from scripts/exregional_post_stat_pm25.sh rename to scripts/exsrw_post_stat_pm25.sh index dc054b87a3..ea7c1717c3 100755 --- a/scripts/exregional_post_stat_pm25.sh +++ b/scripts/exsrw_post_stat_pm25.sh @@ -7,7 +7,7 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. ${USHsrw}/source_util_funcs.sh source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +17,7 @@ source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -62,7 +62,6 @@ else print_info_msg "$VERBOSE" " All executables will be submitted with command \'${RUN_CMD_SERIAL}\'." fi - # #----------------------------------------------------------------------- # @@ -79,7 +78,7 @@ fi # aqm_pm25_post #--------------------------------------------------------------- -ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.nc . +ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.nc . cat >aqm_post.ini <> ${NET}.${cycle}.1hpm25.${id_domain}.grib2 @@ -115,7 +108,7 @@ for grid in 227 196 198; do wgrib2 ${NET}.${cycle}.1hpm25.${id_domain}.grib2 -set_grib_type c3b -new_grid_winds earth -new_grid ${!gg} ${NET}.${cycle}.1hpm25.${grid}.grib2 done -cp_vrfy ${DATA}/${NET}.${cycle}*pm25*.grib2 ${COMOUT} +cp ${DATA}/${NET}.${cycle}*pm25*.grib2 ${COMOUT} # Create AWIPS GRIB2 data for Bias-Corrected PM2.5 if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then @@ -126,7 +119,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=${NET}.${cycle}.1hpm25.${grid}.grib2.temp - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_1hpm25.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_1hpm25.${cycle}.${grid} echo `ls -l ${NET}.${cycle}.grib2_pm25.${grid}.temp | awk '{print $5} '` > filesize export XLFRTEOPTS="unit_vars=yes" @@ -134,16 +127,16 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.1hpm25.${grid}.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_1hpm25.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_1hpm25.${cycle}.${grid} # Post Files to COMOUTwmo - cp_vrfy awpaqm.${cycle}.1hpm25.${grid}.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.1hpm25.${grid}.grib2 ${COMOUTwmo} # Distribute Data - if [ "${SENDDBN_NTC}" = "TRUE" ] ; then - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1hpm25.${grid}.grib2 - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 - fi +# if [ "${SENDDBN_NTC}" = "TRUE" ] ; then +# ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1hpm25.${grid}.grib2 +# ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 +# fi done fi @@ -152,7 +145,7 @@ fi 
#--------------------------------------------------------------- if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then - ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.nc a.nc + ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.nc a.nc export chk=1 export chk1=1 @@ -170,10 +163,10 @@ EOF1 flag_run_bicor_max=yes # 06z needs b.nc to find current day output from 04Z to 06Z if [ "${cyc}" = "06" ]; then - if [ -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc + if [ -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc ]; then + ln -sf ${COMIN}/00/${NET}.t00z.chem_sfc.nc b.nc elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc + ln -sf ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc chk=0 else flag_run_bicor_max=no @@ -182,20 +175,20 @@ EOF1 if [ "${cyc}" = "12" ]; then # 12z needs b.nc to find current day output from 04Z to 06Z - if [ -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc + if [ -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc ]; then + ln -sf ${COMIN}/00/${NET}.t00z.chem_sfc.nc b.nc elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMINm1}/12/${NET}.${PDYm1}.t12z.chem_sfc.nc b.nc + ln -sf ${COMINm1}/12/${NET}.${PDYm1}.t12z.chem_sfc.nc b.nc chk=0 else flag_run_bicor_max=no fi # 12z needs c.nc to find current day output from 07Z to 12z - if [ -s ${COMIN}/../06/${NET}.t06z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMIN}/../06/${NET}.t06z.chem_sfc.nc c.nc + if [ -s ${COMIN}/06/${NET}.t06z.chem_sfc.nc ]; then + ln -sf ${COMIN}/06/${NET}.t06z.chem_sfc.nc c.nc elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMINm1}/12/${NET}.t12z.chem_sfc.nc c.nc + ln -sf ${COMINm1}/12/${NET}.t12z.chem_sfc.nc c.nc chk1=0 else flag_run_bicor_max=no @@ -205,13 +198,7 @@ EOF1 PREP_STEP eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} export err=$? - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_chk - else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to executable to run AQM_POST_MAXI_GRIB2 returned with nonzero exit code." 
- fi - fi POST_STEP wgrib2 ${NET}_pm25_24h_ave.${id_domain}.grib2 |grep "PMTF" | wgrib2 -i ${NET}_pm25_24h_ave.${id_domain}.grib2 -grib ${NET}.${cycle}.ave_24hr_pm25.${id_domain}.grib2 @@ -228,14 +215,14 @@ EOF1 wgrib2 ${NET}.${cycle}.max_1hr_pm25.${id_domain}.grib2 -set_grib_type c3b -new_grid_winds earth -new_grid ${!gg} ${NET}.${cycle}.1hpm25-max.${grid}.grib2 # Add WMO header for daily 1h PM2.5 and 24hr_ave PM2.5 - rm_vrfy -f filesize + rm -f filesize echo 0 > filesize export XLFRTEOPTS="unit_vars=yes" export FORT11=${NET}.${cycle}.1hpm25-max.${grid}.grib2 export FORT12="filesize" export FORT31= export FORT51=${NET}.${cycle}.max_1hr_pm25.${grid}.grib2.temp - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_max_1hr_pm25.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_max_1hr_pm25.${cycle}.${grid} echo `ls -l ${NET}.${cycle}.max_1hr_pm25.${grid}.grib2.temp | awk '{print $5} '` > filesize export XLFRTEOPTS="unit_vars=yes" @@ -243,16 +230,16 @@ EOF1 export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_max_1hr_pm25.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_max_1hr_pm25.${cycle}.${grid} - rm_vrfy -f filesize + rm -f filesize echo 0 > filesize export XLFRTEOPTS="unit_vars=yes" export FORT11=${NET}.${cycle}.24hrpm25-ave.${grid}.grib2 export FORT12="filesize" export FORT31= export FORT51=${NET}.${cycle}.24hrpm25-ave.${grid}.grib2.temp - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_24hrpm25_awp.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_24hrpm25_awp.${cycle}.${grid} echo `ls -l ${NET}.${cycle}.24hrpm25-ave.${grid}.grib2.temp | awk '{print $5} '` > filesize export XLFRTEOPTS="unit_vars=yes" @@ -260,20 +247,28 @@ EOF1 export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_24hrpm25_awp.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_24hrpm25_awp.${cycle}.${grid} - cp_vrfy ${DATA}/${NET}.${cycle}.ave_24hr_pm25*.grib2 ${COMOUT} - cp_vrfy ${DATA}/${NET}.${cycle}.max_1hr_pm25*.grib2 ${COMOUT} - cp_vrfy awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 ${COMOUTwmo} - cp_vrfy awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 ${COMOUTwmo} + cp ${DATA}/${NET}.${cycle}.ave_24hr_pm25*.grib2 ${COMOUT} + cp ${DATA}/${NET}.${cycle}.max_1hr_pm25*.grib2 ${COMOUT} + cp awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 ${COMOUTwmo} + + ############################## + # Distribute Data + ############################## + + if [ "${SENDDBN_NTC}" = "TRUE" ] ; then + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1hpm25.${grid}.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 + fi if [ "$SENDDBN" = "TRUE" ]; then - ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUTwmo}/${NET}.${cycle}.ave_24hr_pm25.${grid}.grib2 - ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUTwmo}/${NET}.${cycle}.max_1hr_pm25.${grid}.grib2 + ${DBNROOT}/bin/dbn_alert MODEL AQM_PM ${job} ${COMOUTwmo}/awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 + ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 fi done fi - # #----------------------------------------------------------------------- # diff --git 
a/scripts/exregional_pre_post_stat.sh b/scripts/exsrw_pre_post_stat.sh similarity index 81% rename from scripts/exregional_pre_post_stat.sh rename to scripts/exsrw_pre_post_stat.sh index 44f4637684..dfb4c2cf9e 100755 --- a/scripts/exregional_pre_post_stat.sh +++ b/scripts/exsrw_pre_post_stat.sh @@ -7,7 +7,7 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh +. ${USHsrw}/source_util_funcs.sh source_config_for_task "task_pre_post|task_run_post" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -17,7 +17,7 @@ source_config_for_task "task_pre_post|task_run_post" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -63,11 +63,11 @@ ist=1 while [ "$ist" -le "${FCST_LEN_HRS}" ]; do hst=$( printf "%03d" "${ist}" ) - rm_vrfy -f ${DATA}/tmp*nc - rm_vrfy -f ${DATA}/${NET}.${cycle}.chem_sfc_f${hst}*nc - rm_vrfy -f ${DATA}/${NET}.${cycle}.met_sfc_f${hst}*nc + rm -f ${DATA}/tmp*nc + rm -f ${DATA}/${NET}.${cycle}.chem_sfc_f${hst}*nc + rm -f ${DATA}/${NET}.${cycle}.met_sfc_f${hst}*nc - ncks -v lat,lon,o3_ave,no_ave,no2_ave,pm25_ave -d pfull,63,63 ${COMIN}/${NET}.${cycle}.dyn.f${hst}.nc ${DATA}/tmp2a.nc + ncks -v lat,lon,o3_ave,no_ave,no2_ave,pm25_ave -d pfull,63,63 ${DATA_SHARE}/${NET}.${cycle}.dyn.f${hst}.nc ${DATA}/tmp2a.nc ncks -C -O -x -v pfull ${DATA}/tmp2a.nc ${DATA}/tmp2b.nc @@ -75,11 +75,11 @@ while [ "$ist" -le "${FCST_LEN_HRS}" ]; do ncrename -v o3_ave,o3 -v no_ave,no -v no2_ave,no2 -v pm25_ave,PM25_TOT ${DATA}/tmp2c.nc - mv_vrfy ${DATA}/tmp2c.nc ${DATA}/${NET}.${cycle}.chem_sfc.f${hst}.nc + mv ${DATA}/tmp2c.nc ${DATA}/${NET}.${cycle}.chem_sfc.f${hst}.nc - ncks -v dswrf,hpbl,tmp2m,ugrd10m,vgrd10m,spfh2m ${COMIN}/${NET}.${cycle}.phy.f${hst}.nc ${DATA}/${NET}.${cycle}.met_sfc.f${hst}.nc + ncks -v dswrf,hpbl,tmp2m,ugrd10m,vgrd10m,spfh2m ${DATA_SHARE}/${NET}.${cycle}.phy.f${hst}.nc ${DATA}/${NET}.${cycle}.met_sfc.f${hst}.nc - ncks -v aod ${COMIN}/${NET}.${cycle}.phy.f${hst}.nc ${DATA}/${NET}.${cycle}.aod.f${hst}.nc + ncks -v aod ${DATA_SHARE}/${NET}.${cycle}.phy.f${hst}.nc ${DATA}/${NET}.${cycle}.aod.f${hst}.nc (( ist=ist+1 )) done @@ -101,7 +101,6 @@ while [ "${ist}" -le "${FCST_LEN_HRS}" ]; do done ncecat ${DATA}/${NET}.${cycle}.chem_sfc.f*.nc ${DATA}/${NET}.${cycle}.chem_sfc.nc - # #----------------------------------------------------------------------- # @@ -109,10 +108,10 @@ ncecat ${DATA}/${NET}.${cycle}.chem_sfc.f*.nc ${DATA}/${NET}.${cycle}.chem_sfc. 
# #----------------------------------------------------------------------- # -mv_vrfy ${DATA}/${NET}.${cycle}.met_sfc.f*.nc ${COMIN} -mv_vrfy ${DATA}/${NET}.${cycle}.chem_sfc.f*.nc ${COMIN} -mv_vrfy ${DATA}/${NET}.${cycle}.chem_sfc.nc ${COMIN} -mv_vrfy ${DATA}/${NET}.${cycle}.aod.f*.nc ${COMIN} +mv ${DATA}/${NET}.${cycle}.met_sfc.f*.nc ${COMOUT} +mv ${DATA}/${NET}.${cycle}.chem_sfc.f*.nc ${COMOUT} +mv ${DATA}/${NET}.${cycle}.chem_sfc.nc ${COMOUT} +mv ${DATA}/${NET}.${cycle}.aod.f*.nc ${COMOUT} # #----------------------------------------------------------------------- # diff --git a/tests/WE2E/machine_suites/comprehensive b/tests/WE2E/machine_suites/comprehensive index a416408056..3af6ae0db4 100644 --- a/tests/WE2E/machine_suites/comprehensive +++ b/tests/WE2E/machine_suites/comprehensive @@ -66,12 +66,6 @@ MET_ensemble_verification_only_vx MET_ensemble_verification_only_vx_time_lag MET_ensemble_verification_winter_wx MET_verification_only_vx -nco -nco_ensemble -nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 -nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 -nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km -nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR pregen_grid_orog_sfc_climo specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS specify_template_filenames diff --git a/tests/WE2E/machine_suites/comprehensive.cheyenne b/tests/WE2E/machine_suites/comprehensive.cheyenne index e518e0c4cb..96792e37b0 100644 --- a/tests/WE2E/machine_suites/comprehensive.cheyenne +++ b/tests/WE2E/machine_suites/comprehensive.cheyenne @@ -48,12 +48,6 @@ grid_SUBCONUS_Ind_3km_ics_NAM_lbcs_NAM_suite_GFS_v16 grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot MET_ensemble_verification_only_vx MET_verification_only_vx -nco -nco_ensemble -nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 -nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 -nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km -nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR pregen_grid_orog_sfc_climo specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS specify_template_filenames diff --git a/tests/WE2E/machine_suites/comprehensive.derecho b/tests/WE2E/machine_suites/comprehensive.derecho index 1fa9d1c055..9ce8d067ac 100644 --- a/tests/WE2E/machine_suites/comprehensive.derecho +++ b/tests/WE2E/machine_suites/comprehensive.derecho @@ -55,12 +55,6 @@ grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot MET_ensemble_verification_only_vx MET_ensemble_verification_winter_wx MET_verification_only_vx -nco -nco_ensemble -nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 -nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 -nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km -nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR pregen_grid_orog_sfc_climo specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS specify_template_filenames diff --git a/tests/WE2E/machine_suites/comprehensive.noaacloud b/tests/WE2E/machine_suites/comprehensive.noaacloud index f81d8c9d1a..23c0aa8456 100644 --- a/tests/WE2E/machine_suites/comprehensive.noaacloud +++ b/tests/WE2E/machine_suites/comprehensive.noaacloud @@ -49,12 +49,6 @@ grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot MET_ensemble_verification_only_vx MET_ensemble_verification_winter_wx MET_verification_only_vx -nco -nco_ensemble -nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 
-nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 -nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km -nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR pregen_grid_orog_sfc_climo specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS specify_template_filenames diff --git a/tests/WE2E/machine_suites/comprehensive.orion b/tests/WE2E/machine_suites/comprehensive.orion index b5b65c668b..739b4fff8e 100644 --- a/tests/WE2E/machine_suites/comprehensive.orion +++ b/tests/WE2E/machine_suites/comprehensive.orion @@ -55,12 +55,6 @@ grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot MET_ensemble_verification_only_vx MET_ensemble_verification_winter_wx MET_verification_only_vx -nco -nco_ensemble -nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 -nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 -nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km -nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR pregen_grid_orog_sfc_climo specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS specify_template_filenames diff --git a/tests/WE2E/machine_suites/coverage.cheyenne b/tests/WE2E/machine_suites/coverage.cheyenne index 19bbc623c7..8f3c3ec78c 100644 --- a/tests/WE2E/machine_suites/coverage.cheyenne +++ b/tests/WE2E/machine_suites/coverage.cheyenne @@ -4,6 +4,5 @@ grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR -nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 pregen_grid_orog_sfc_climo specify_template_filenames diff --git a/tests/WE2E/machine_suites/coverage.derecho b/tests/WE2E/machine_suites/coverage.derecho index 3475caebcc..c2a770672e 100644 --- a/tests/WE2E/machine_suites/coverage.derecho +++ b/tests/WE2E/machine_suites/coverage.derecho @@ -4,7 +4,6 @@ grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR -nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 pregen_grid_orog_sfc_climo specify_template_filenames 2019_hurricane_barry diff --git a/tests/WE2E/machine_suites/coverage.gaea b/tests/WE2E/machine_suites/coverage.gaea index 068077464d..e6aba6ea3d 100644 --- a/tests/WE2E/machine_suites/coverage.gaea +++ b/tests/WE2E/machine_suites/coverage.gaea @@ -7,6 +7,4 @@ grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot -nco_ensemble -nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km 2020_CAPE diff --git a/tests/WE2E/machine_suites/coverage.hera.gnu.com b/tests/WE2E/machine_suites/coverage.hera.gnu.com index 75533b4609..4c802781f9 100644 --- a/tests/WE2E/machine_suites/coverage.hera.gnu.com +++ b/tests/WE2E/machine_suites/coverage.hera.gnu.com @@ -7,5 +7,4 @@ grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_RAP_suite_WoFS_v0 long_fcst MET_verification_only_vx MET_ensemble_verification_only_vx_time_lag -nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 2019_halloween_storm diff --git a/tests/WE2E/machine_suites/coverage.jet b/tests/WE2E/machine_suites/coverage.jet index 
a01d095828..53308090b1 100644 --- a/tests/WE2E/machine_suites/coverage.jet +++ b/tests/WE2E/machine_suites/coverage.jet @@ -9,4 +9,3 @@ grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2 grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta -nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR diff --git a/tests/WE2E/machine_suites/coverage.orion b/tests/WE2E/machine_suites/coverage.orion index dd13f27318..c698648b10 100644 --- a/tests/WE2E/machine_suites/coverage.orion +++ b/tests/WE2E/machine_suites/coverage.orion @@ -8,5 +8,4 @@ grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR grid_RRFS_CONUScompact_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0 -nco 2020_CAD diff --git a/tests/WE2E/machine_suites/fundamental b/tests/WE2E/machine_suites/fundamental index 858a442253..09d9482c7d 100644 --- a/tests/WE2E/machine_suites/fundamental +++ b/tests/WE2E/machine_suites/fundamental @@ -4,8 +4,6 @@ # Test RRFS_CONUScompact_25km grid, HRRR ics, RAP lbcs, RRFS_v1beta suite grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta -# Test grid_RRFS_CONUS_25km in NCO mode with FV3GFS bcs (6hr time offset), FV3_GFS_v16 suite -nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 # Test grid_RRFS_CONUS_25km grid, FV3GFS bcs, inline post, GFS_v15p2 suite grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2 # Test grid_RRFS_CONUS_25km grid, FV3GFS bcs, restart files, GFS_v17_p8 suite diff --git a/tests/WE2E/run_WE2E_tests.py b/tests/WE2E/run_WE2E_tests.py index 5e1109c1ea..5d4bd81105 100755 --- a/tests/WE2E/run_WE2E_tests.py +++ b/tests/WE2E/run_WE2E_tests.py @@ -96,12 +96,6 @@ def run_we2e_tests(homedir, args) -> None: logging.debug(f'{testfilename} exists for this platform and run_envir'\ 'has not been specified\n'\ 'Setting run_envir = {run_envir} for all tests') - else: - if not run_envir: - run_envir = 'nco' - logging.debug(f'{testfilename} exists for this platform and run_envir has'\ - 'not been specified\n'\ - 'Setting run_envir = {run_envir} for all tests') logging.debug(f"Reading test file: {testfilename}") with open(testfilename, encoding="utf-8") as f: tests_to_check = [x.rstrip() for x in f] @@ -175,14 +169,6 @@ def run_we2e_tests(homedir, args) -> None: test_cfg['user'].update({"ACCOUNT": args.account}) if run_envir: test_cfg['user'].update({"RUN_ENVIR": run_envir}) - if run_envir == "nco": - if 'nco' not in test_cfg: - test_cfg['nco'] = dict() - test_cfg['nco'].update({"model_ver_default": "we2e"}) - if args.opsroot: - if 'nco' not in test_cfg: - test_cfg['nco'] = dict() - test_cfg['nco'].update({"OPSROOT_default": args.opsroot}) # if platform section was not in input config, initialize as empty dict if 'platform' not in test_cfg: test_cfg['platform'] = dict() @@ -529,9 +515,6 @@ def setup_logging(logfile: str = "log.run_WE2E_tests", debug: bool = False) -> N help='DEPRECATED; DO NOT USE. 
See "launch" option.') ap.add_argument('--cron_relaunch_intvl_mnts', type=int, help='Overrides CRON_RELAUNCH_INTVL_MNTS for all experiments') - ap.add_argument('--opsroot', type=str, - help='If test is for NCO mode, sets OPSROOT_default (see config_defaults.yaml'\ - 'for more details on this variable)') ap.add_argument('--print_test_info', action='store_true', help='Create a "WE2E_test_info.txt" file summarizing each test prior to'\ 'starting experiment') diff --git a/tests/WE2E/test_configs/default_configs/config.nco.yaml b/tests/WE2E/test_configs/default_configs/config.nco.yaml deleted file mode 120000 index 690636fd63..0000000000 --- a/tests/WE2E/test_configs/default_configs/config.nco.yaml +++ /dev/null @@ -1 +0,0 @@ -../../../../ush/config.nco.yaml \ No newline at end of file diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml deleted file mode 100644 index 8fc88c9b6a..0000000000 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml +++ /dev/null @@ -1,26 +0,0 @@ -metadata: - description: |- - This test is to ensure that the workflow running in nco mode completes - successfully on the RRFS_CONUS_13km grid using the GFS_v16 physics - suite with ICs and LBCs derived from the FV3GFS. -user: - RUN_ENVIR: nco -workflow: - CCPP_PHYS_SUITE: FV3_GFS_v16 - PREDEF_GRID_NAME: RRFS_CONUS_13km - DATE_FIRST_CYCL: '2019061500' - DATE_LAST_CYCL: '2019061500' - FCST_LEN_HRS: 6 - PREEXISTING_DIR_METHOD: rename -rocoto: - tasks: - taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}' -task_get_extrn_ics: - USE_USER_STAGED_EXTRN_FILES: true - EXTRN_MDL_NAME_ICS: FV3GFS - FV3GFS_FILE_FMT_ICS: grib2 -task_get_extrn_lbcs: - USE_USER_STAGED_EXTRN_FILES: true - EXTRN_MDL_NAME_LBCS: FV3GFS - LBC_SPEC_INTVL_HRS: 3 - FV3GFS_FILE_FMT_LBCS: grib2 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml deleted file mode 100644 index 76c9656686..0000000000 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml +++ /dev/null @@ -1,26 +0,0 @@ -metadata: - description: |- - This test is to ensure that the workflow running in nco mode completes - successfully on the RRFS_CONUS_25km grid using the FV3_GFS_v16 physics - suite with time-offset ICs/LBCs derived from the FV3GFS. 
-user: - RUN_ENVIR: nco -workflow: - CCPP_PHYS_SUITE: FV3_GFS_v16 - PREDEF_GRID_NAME: RRFS_CONUS_25km - DATE_FIRST_CYCL: '2022081012' - DATE_LAST_CYCL: '2022081012' - FCST_LEN_HRS: 6 - PREEXISTING_DIR_METHOD: rename -rocoto: - tasks: - taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}' -task_get_extrn_ics: - EXTRN_MDL_NAME_ICS: FV3GFS - EXTRN_MDL_ICS_OFFSET_HRS: 6 - FV3GFS_FILE_FMT_ICS: netcdf -task_get_extrn_lbcs: - EXTRN_MDL_NAME_LBCS: FV3GFS - LBC_SPEC_INTVL_HRS: 3 - EXTRN_MDL_LBCS_OFFSET_HRS: 6 - FV3GFS_FILE_FMT_LBCS: netcdf diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml deleted file mode 100644 index 9a381857ed..0000000000 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml +++ /dev/null @@ -1,28 +0,0 @@ -metadata: - description: |- - This test is to ensure that the workflow running in nco mode completes - successfully on the RRFS_CONUS_3km grid using the GFS_v15_thompson_mynn_lam3km - physics suite with ICs and LBCs derived from the FV3GFS. -user: - RUN_ENVIR: nco -workflow: - CCPP_PHYS_SUITE: FV3_GFS_v15_thompson_mynn_lam3km - PREDEF_GRID_NAME: RRFS_CONUS_3km - DATE_FIRST_CYCL: '2019061500' - DATE_LAST_CYCL: '2019061500' - FCST_LEN_HRS: 6 - PREEXISTING_DIR_METHOD: rename -rocoto: - tasks: - taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}' -task_get_extrn_ics: - USE_USER_STAGED_EXTRN_FILES: true - EXTRN_MDL_NAME_ICS: FV3GFS - FV3GFS_FILE_FMT_ICS: grib2 -task_get_extrn_lbcs: - USE_USER_STAGED_EXTRN_FILES: true - EXTRN_MDL_NAME_LBCS: FV3GFS - LBC_SPEC_INTVL_HRS: 3 - FV3GFS_FILE_FMT_LBCS: grib2 -task_run_fcst: - USE_MERRA_CLIMO: true diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml deleted file mode 100644 index 0755e7fc4d..0000000000 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml +++ /dev/null @@ -1,30 +0,0 @@ -metadata: - description: |- - This test is to ensure that the workflow running in nco mode completes - successfully on the RRFS_CONUScompact_25km grid using the HRRR physics - suite with ICs derived from the HRRR and LBCs derived from the RAP. 
-user: - RUN_ENVIR: nco -workflow: - CCPP_PHYS_SUITE: FV3_HRRR - PREDEF_GRID_NAME: RRFS_CONUScompact_25km - DATE_FIRST_CYCL: '2020081000' - DATE_LAST_CYCL: '2020081000' - FCST_LEN_HRS: 6 - PREEXISTING_DIR_METHOD: rename -rocoto: - tasks: - taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}' -task_get_extrn_ics: - EXTRN_MDL_NAME_ICS: HRRR - USE_USER_STAGED_EXTRN_FILES: true - EXTRN_MDL_FILES_ICS: - - '{yy}{jjj}{hh}00{fcst_hr:02d}00' -task_get_extrn_lbcs: - EXTRN_MDL_NAME_LBCS: RAP - LBC_SPEC_INTVL_HRS: 3 - USE_USER_STAGED_EXTRN_FILES: true - EXTRN_MDL_FILES_LBCS: - - '{yy}{jjj}{hh}00{fcst_hr:02d}00' -task_run_fcst: - WRITE_DOPOST: true diff --git a/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml b/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml deleted file mode 100644 index c060cb7f9f..0000000000 --- a/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml +++ /dev/null @@ -1,34 +0,0 @@ -metadata: - description: |- - This test checks the capability of the workflow to run ensemble forecasts - (i.e. DO_ENSEMBLE set to "TRUE") in nco mode (i.e. RUN_ENVIR set to - "nco") with the number of ensemble members (NUM_ENS_MEMBERS) set to - "2". The lack of leading zeros in this "2" should cause the ensemble - members to be named "mem1" and "mem2" (instead of, for instance, "mem01" - and "mem02"). - Note also that this test uses two cycle hours ("12" and "18") to test - the capability of the workflow to run ensemble forecasts for more than - one cycle hour in nco mode. -user: - RUN_ENVIR: nco -workflow: - CCPP_PHYS_SUITE: FV3_GFS_v15p2 - PREDEF_GRID_NAME: RRFS_CONUS_25km - DATE_FIRST_CYCL: '2019070100' - DATE_LAST_CYCL: '2019070212' - INCR_CYCL_FREQ: 12 - FCST_LEN_HRS: 6 - PREEXISTING_DIR_METHOD: rename -task_get_extrn_ics: - EXTRN_MDL_NAME_ICS: FV3GFS - USE_USER_STAGED_EXTRN_FILES: true -task_get_extrn_lbcs: - EXTRN_MDL_NAME_LBCS: FV3GFS - LBC_SPEC_INTVL_HRS: 3 - USE_USER_STAGED_EXTRN_FILES: true -global: - DO_ENSEMBLE: true - NUM_ENS_MEMBERS: 2 -rocoto: - tasks: - taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}' diff --git a/tests/WE2E/test_configs/wflow_features/config.nco_inline_post.yaml b/tests/WE2E/test_configs/wflow_features/config.nco_inline_post.yaml deleted file mode 120000 index 6ec59fe0dd..0000000000 --- a/tests/WE2E/test_configs/wflow_features/config.nco_inline_post.yaml +++ /dev/null @@ -1 +0,0 @@ -../grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml \ No newline at end of file diff --git a/tests/test_python/test_generate_FV3LAM_wflow.py b/tests/test_python/test_generate_FV3LAM_wflow.py index 9e9e9f5274..48029d21b6 100644 --- a/tests/test_python/test_generate_FV3LAM_wflow.py +++ b/tests/test_python/test_generate_FV3LAM_wflow.py @@ -8,12 +8,9 @@ from multiprocessing import Process from python_utils import ( - load_config_file, - update_dict, cp_vrfy, run_command, define_macos_utilities, - cfg_to_yaml_str, set_env_var, get_env_var, ) @@ -24,7 +21,7 @@ class Testing(unittest.TestCase): """ Class to run the tests. """ def test_generate_FV3LAM_wflow(self): - """ Test that a community and nco sample config can successfully + """ Test that a sample config can successfully lead to the creation of an experiment directory. No jobs are submitted. 
""" @@ -49,30 +46,6 @@ def run_workflow(USHdir, logfile): ) run_workflow(USHdir, logfile) - # nco test case - nco_test_config = load_config_file(f"{USHdir}/config.nco.yaml") - # Since we don't have a pre-gen grid dir on a generic linux - # platform, turn the make_* tasks on for this test. - cfg_updates = { - "user": { - "MACHINE": "linux", - }, - "rocoto": { - "tasks": { - "taskgroups": \ - """'{{ ["parm/wflow/prep.yaml", - "parm/wflow/coldstart.yaml", - "parm/wflow/post.yaml"]|include }}'""" - }, - }, - } - update_dict(cfg_updates, nco_test_config) - - with open(f"{USHdir}/config.yaml", "w", encoding="utf-8") as cfg_file: - cfg_file.write(cfg_to_yaml_str(nco_test_config)) - - run_workflow(USHdir, logfile) - def setUp(self): define_macos_utilities() set_env_var("DEBUG", False) diff --git a/ush/config.aqm.community.yaml b/ush/config.aqm.community.yaml index 7586719f2e..2f32d0eac5 100644 --- a/ush/config.aqm.community.yaml +++ b/ush/config.aqm.community.yaml @@ -2,7 +2,7 @@ metadata: description: config for Online-CMAQ, AQM_NA_13km, community mode user: RUN_ENVIR: community - MACHINE: [hera or wcoss2] + MACHINE: hera ACCOUNT: [account name] workflow: USE_CRON_TO_RELAUNCH: true diff --git a/ush/config.aqm.nco.realtime.yaml b/ush/config.aqm.nco.realtime.yaml deleted file mode 100644 index f2299eacc9..0000000000 --- a/ush/config.aqm.nco.realtime.yaml +++ /dev/null @@ -1,99 +0,0 @@ -metadata: - description: config for Online-CMAQ, AQM_NA_13km, real-time, NCO mode on WCOSS2 -user: - RUN_ENVIR: nco - MACHINE: wcoss2 - ACCOUNT: [account name] -workflow: - USE_CRON_TO_RELAUNCH: true - CRON_RELAUNCH_INTVL_MNTS: 3 - EXPT_SUBDIR: aqm_nco_aqmna13km - PREDEF_GRID_NAME: AQM_NA_13km - CCPP_PHYS_SUITE: FV3_GFS_v16 - DATE_FIRST_CYCL: '2023051600' - DATE_LAST_CYCL: '2023051618' - INCR_CYCL_FREQ: 6 - FCST_LEN_HRS: -1 - FCST_LEN_CYCL: - - 6 - - 72 - - 72 - - 6 - PREEXISTING_DIR_METHOD: rename - VERBOSE: true - DEBUG: true - COMPILER: intel - DIAG_TABLE_TMPL_FN: diag_table_aqm.FV3_GFS_v16 - FIELD_TABLE_TMPL_FN: field_table_aqm.FV3_GFS_v16 - DO_REAL_TIME: true - COLDSTART: false - WARMSTART_CYCLE_DIR: /path/to/restart/dir -nco: - envir_default: prod - NET_default: aqm - model_ver_default: v7.0 - RUN_default: aqm - OPSROOT_default: /path/to/custom/opsroot - KEEPDATA_default: true -rocoto: - tasks: - taskgroups: '{{ ["parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/aqm_post.yaml"]|include }}' - task_get_extrn_lbcs: - walltime: 02:00:00 - metatask_run_ensemble: - task_run_fcst_mem#mem#: - walltime: 04:00:00 -# task_aqm_ics_ext: - task_aqm_lbcs: - walltime: 01:00:00 -task_make_grid: - GRID_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/DOMAIN_DATA/AQM_NA_13km -task_make_orog: - OROG_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/DOMAIN_DATA/AQM_NA_13km -task_make_sfc_climo: - SFC_CLIMO_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/DOMAIN_DATA/AQM_NA_13km -task_get_extrn_ics: - EXTRN_MDL_NAME_ICS: FV3GFS - FV3GFS_FILE_FMT_ICS: netcdf - EXTRN_MDL_ICS_OFFSET_HRS: 6 -task_get_extrn_lbcs: - EXTRN_MDL_NAME_LBCS: FV3GFS - LBC_SPEC_INTVL_HRS: 6 - FV3GFS_FILE_FMT_LBCS: netcdf - EXTRN_MDL_LBCS_OFFSET_HRS: 6 -task_run_fcst: - DT_ATMOS: 180 - LAYOUT_X: 50 - LAYOUT_Y: 34 - BLOCKSIZE: 16 - RESTART_INTERVAL: 6 24 42 60 - QUILTING: true - PRINT_ESMF: false - DO_FCST_RESTART: false -task_run_post: - POST_OUTPUT_DOMAIN_NAME: 793 -global: - DO_ENSEMBLE: false - NUM_ENS_MEMBERS: 2 - HALO_BLEND: 0 -cpl_aqm_parm: - CPL_AQM: true - DO_AQM_CHEM_LBCS: true - DO_AQM_GEFS_LBCS: true - DO_AQM_DUST: true - DO_AQM_CANOPY: 
false - DO_AQM_PRODUCT: true - DO_AQM_SAVE_AIRNOW_HIST: false - DO_AQM_SAVE_FIRE: false - AQM_BIO_FILE: BEIS_RRFScmaq_C775.ncf - AQM_DUST_FILE_PREFIX: FENGSHA_p8_10km_inputs - AQM_DUST_FILE_SUFFIX: .nc - AQM_CANOPY_FILE_PREFIX: gfs.t12z.geo - AQM_CANOPY_FILE_SUFFIX: .canopy_regrid.nc - AQM_FIRE_FILE_PREFIX: Hourly_Emissions_regrid_NA_13km - AQM_FIRE_FILE_SUFFIX: _h72.nc - AQM_RC_FIRE_FREQUENCY: hourly - AQM_LBCS_FILES: am4_bndy.c793.2019.v1.nc - NEXUS_GRID_FN: grid_spec_793.nc - NUM_SPLIT_NEXUS: 6 - diff --git a/ush/config.nco.yaml b/ush/config.nco.yaml deleted file mode 100644 index afcce0ba8a..0000000000 --- a/ush/config.nco.yaml +++ /dev/null @@ -1,41 +0,0 @@ -metadata: - description: >- - Sample nco config -user: - RUN_ENVIR: nco - MACHINE: hera - ACCOUNT: an_account -workflow: - USE_CRON_TO_RELAUNCH: false - EXPT_SUBDIR: test_nco - CCPP_PHYS_SUITE: FV3_GFS_v16 - PREDEF_GRID_NAME: RRFS_CONUS_25km - DATE_FIRST_CYCL: '2022040700' - DATE_LAST_CYCL: '2022040700' - FCST_LEN_HRS: 6 - PREEXISTING_DIR_METHOD: rename - VERBOSE: true - COMPILER: intel -nco: - model_ver_default: v1.0 - RUN_default: srw_test -task_get_extrn_ics: - EXTRN_MDL_NAME_ICS: FV3GFS - FV3GFS_FILE_FMT_ICS: grib2 -task_get_extrn_lbcs: - EXTRN_MDL_NAME_LBCS: FV3GFS - LBC_SPEC_INTVL_HRS: 3 - FV3GFS_FILE_FMT_LBCS: grib2 -task_run_fcst: - WRITE_DOPOST: true - QUILTING: true -task_plot_allvars: - COMOUT_REF: "" -task_run_post: - POST_OUTPUT_DOMAIN_NAME: conus_25km -rocoto: - tasks: - taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' - metatask_run_ensemble: - task_run_fcst_mem#mem#: - walltime: 01:00:00 diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index b35b6108c7..6e7823c5d2 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1116,31 +1116,8 @@ nco: # Name of model run (third level of com directory structure). # In general, same as ${NET_default}. # - # OPSROOT_default: - # The operations root directory in NCO mode. - # - # COMROOT_default: - # The com root directory for input/output data that is located on - # the current system. - # - # DATAROOT_default: - # Directory containing the (temporary) working directory for running - # jobs. - # - # DCOMROOT_default: - # dcom root directory, which contains input/incoming data that is - # retrieved from outside WCOSS. - # - # LOGBASEDIR_default: - # Directory in which the log files from the workflow tasks will be placed. - # - # COMIN_BASEDIR: - # com directory for current model's input data, typically - # $COMROOT/$NET/$model_ver/$RUN.$PDY - # - # COMOUT_BASEDIR: - # com directory for current model's output data, typically - # $COMROOT/$NET/$model_ver/$RUN.$PDY + # PTMP: + # User-defined path to the com type directories (OPSROOT=$PTMP/$envir). # # DBNROOT_default: # Root directory for the data-alerting utilities. 
@@ -1174,26 +1151,20 @@ nco: # #----------------------------------------------------------------------- # - envir_default: "para" + envir_default: "test" NET_default: "srw" RUN_default: "srw" model_ver_default: "v1.0.0" - OPSROOT_default: '{{ workflow.EXPT_BASEDIR }}/../nco_dirs' - COMROOT_default: '{{ OPSROOT_default }}/com' - DATAROOT_default: '{{ OPSROOT_default }}/tmp' - DCOMROOT_default: '{{ OPSROOT_default }}/dcom' - LOGBASEDIR_default: '{% if user.RUN_ENVIR == "nco" %}{{ [OPSROOT_default, "output"]|path_join }}{% else %}{{ [workflow.EXPTDIR, "log"]|path_join }}{% endif %}' - COMIN_BASEDIR: '{{ COMROOT_default }}/{{ NET_default }}/{{ model_ver_default }}' - COMOUT_BASEDIR: '{{ COMROOT_default }}/{{ NET_default }}/{{ model_ver_default }}' + PTMP: '{{ workflow.EXPT_BASEDIR }}/../nco_dirs' DBNROOT_default: "" - SENDECF_default: false - SENDDBN_default: false - SENDDBN_NTC_default: false - SENDCOM_default: false - SENDWEB_default: false - KEEPDATA_default: true + SENDECF_default: "NO" + SENDDBN_default: "NO" + SENDDBN_NTC_default: "NO" + SENDCOM_default: "YES" + SENDWEB_default: "NO" + KEEPDATA_default: "YES" MAILTO_default: "" MAILCC_default: "" @@ -2173,6 +2144,10 @@ task_nexus_emission: # PPN_NEXUS_EMISSION: # Processes per node for the nexus_emission_* tasks. # + # NNODES_NEXUS_EMISSION: + # The number of nodes to request from the job scheduler + # for the nexus emission task. + # # KMP_AFFINITY_NEXUS_EMISSION: # Intel Thread Affinity Interface for the nexus_emission_* tasks. # @@ -2183,10 +2158,22 @@ task_nexus_emission: # Controls the size of the stack for threads created by the OpenMP implementation. #------------------------------------------------------------------------------- PPN_NEXUS_EMISSION: '{{ platform.NCORES_PER_NODE // OMP_NUM_THREADS_NEXUS_EMISSION }}' + NNODES_NEXUS_EMISSION: 4 KMP_AFFINITY_NEXUS_EMISSION: "scatter" OMP_NUM_THREADS_NEXUS_EMISSION: 2 OMP_STACKSIZE_NEXUS_EMISSION: "1024m" +#----------------------------- +# POINT_SOURCE config parameters +#----------------------------- +task_point_source: + #------------------------------------------------------------------------------- + # PT_SRC_SUBDIR: + # Sub-directory structure of point source data under FIXemis. 
+ # Full path: FIXemis/PT_SRC_SUBDIR + #------------------------------------------------------------------------------- + PT_SRC_SUBDIR: "NEI2016v1/v2023-01-PT" + #---------------------------- # BIAS_CORRECTION_O3 config parameters #----------------------------- @@ -2574,41 +2561,15 @@ cpl_aqm_parm: # # DO_AQM_SAVE_FIRE: # Archive fire emission file to HPSS - # - # DCOMINbio_default: - # Path to the directory containing AQM bio files # - # DCOMINdust_default: - # Path to the directory containing AQM dust file + # COMINairnow_default: + # Path to the directory containing AIRNOW observation data # - # DCOMINcanopy_default: - # Path to the directory containing AQM canopy files - # - # DCOMINfire_default: + # COMINfire_default: # Path to the directory containing AQM fire files # - # DCOMINchem_lbcs_default: - # Path to the directory containing chemical LBC files - # - # DCOMINgefs_default: + # COMINgefs_default: # Path to the directory containing GEFS aerosol LBC files - # - # DCOMINpt_src_default: - # Parent directory containing point source files - # - # DCOMINairnow_default: - # Path to the directory containing AIRNOW observation data - # - # COMINbicor: - # Path of reading in historical training data for biascorrection - # - # COMOUTbicor: - # Path to save the current cycle's model output and AirNow obs as - # training data for future use. $COMINbicor and $COMOUTbicor can be - # distinguished by the ${yyyy}${mm}${dd} under the same location - # - # AQM_CONFIG_DIR: - # Configuration directory for AQM # # AQM_BIO_FILE: # File name of AQM BIO file @@ -2634,9 +2595,6 @@ cpl_aqm_parm: # AQM_FIRE_FILE_OFFSET_HRS: # Time offset when retrieving fire emission data files. # - # AQM_FIRE_ARCHV_DIR: - # Path to the archive directory for RAVE emission files on HPSS - # # AQM_RC_FIRE_FREQUENCY: # Fire frequency in aqm.rc # @@ -2655,13 +2613,6 @@ cpl_aqm_parm: # AQM_GEFS_FILE_CYC: # Cycle of the GEFS aerosol LBC files only if it is fixed # - # NEXUS_INPUT_DIR: - # Same as GRID_DIR but for the the air quality emission generation task. 
- # Should be blank for the default value specified in setup.sh - # - # NEXUS_FIX_DIR: - # Directory containing grid_spec files as the input file of nexus - # # NEXUS_GRID_FN: # File name of the input grid_spec file of nexus # @@ -2690,18 +2641,10 @@ cpl_aqm_parm: DO_AQM_SAVE_AIRNOW_HIST: false DO_AQM_SAVE_FIRE: false - DCOMINbio_default: "" - DCOMINdust_default: "/path/to/dust/dir" - DCOMINcanopy_default: "/path/to/canopy/dir" - DCOMINfire_default: "" - DCOMINchem_lbcs_default: "" - DCOMINgefs_default: "" - DCOMINpt_src_default: "/path/to/point/source/base/directory" - DCOMINairnow_default: "/path/to/airnow/obaservation/data" - COMINbicor: "/path/to/historical/airnow/data/dir" - COMOUTbicor: "/path/to/historical/airnow/data/dir" + COMINairnow_default: "/path/to/airnow/obaservation/data" + COMINfire_default: "" + COMINgefs_default: "" - AQM_CONFIG_DIR: "" AQM_BIO_FILE: "BEIS_SARC401.ncf" AQM_DUST_FILE_PREFIX: "FENGSHA_p8_10km_inputs" @@ -2713,7 +2656,6 @@ cpl_aqm_parm: AQM_FIRE_FILE_PREFIX: "GBBEPx_C401GRID.emissions_v003" AQM_FIRE_FILE_SUFFIX: ".nc" AQM_FIRE_FILE_OFFSET_HRS: 0 - AQM_FIRE_ARCHV_DIR: "/path/to/archive/dir/for/RAVE/on/HPSS" AQM_RC_FIRE_FREQUENCY: "static" AQM_RC_PRODUCT_FN: "aqm.prod.nc" @@ -2724,8 +2666,6 @@ cpl_aqm_parm: AQM_GEFS_FILE_PREFIX: "geaer" AQM_GEFS_FILE_CYC: "" - NEXUS_INPUT_DIR: "" - NEXUS_FIX_DIR: "" NEXUS_GRID_FN: "grid_spec_GSD_HRRR_25km.nc" NUM_SPLIT_NEXUS: 3 NEXUS_GFS_SFC_OFFSET_HRS: 0 diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py index 5608e4cbf2..726e8eb0f3 100644 --- a/ush/create_aqm_rc_file.py +++ b/ush/create_aqm_rc_file.py @@ -61,25 +61,23 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): # # Set parameters in the aqm.rc file. # - aqm_rc_bio_file_fp=os.path.join(DCOMINbio, AQM_BIO_FILE) + aqm_rc_bio_file_fp=os.path.join(FIXaqm,"bio", AQM_BIO_FILE) # Fire config aqm_rc_fire_file_fp=os.path.join( COMIN, - "FIRE_EMISSION", f"{AQM_FIRE_FILE_PREFIX}_{yyyymmdd}_t{hh}z{AQM_FIRE_FILE_SUFFIX}" ) # Dust config aqm_rc_dust_file_fp=os.path.join( - DCOMINdust, + FIXaqm,"dust", f"{AQM_DUST_FILE_PREFIX}_{PREDEF_GRID_NAME}{AQM_DUST_FILE_SUFFIX}", ) # Canopy config aqm_rc_canopy_file_fp=os.path.join( - DCOMINcanopy, - PREDEF_GRID_NAME, + FIXaqm,"canopy",PREDEF_GRID_NAME, f"{AQM_CANOPY_FILE_PREFIX}.{mm}{AQM_CANOPY_FILE_SUFFIX}", ) # @@ -96,10 +94,9 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): "do_aqm_canopy": DO_AQM_CANOPY, "do_aqm_product": DO_AQM_PRODUCT, "ccpp_phys_suite": CCPP_PHYS_SUITE, - "aqm_config_dir": AQM_CONFIG_DIR, "init_concentrations": init_concentrations, "aqm_rc_bio_file_fp": aqm_rc_bio_file_fp, - "dcominbio": DCOMINbio, + "fixaqm": FIXaqm, "aqm_rc_fire_file_fp": aqm_rc_fire_file_fp, "aqm_rc_fire_frequency": AQM_RC_FIRE_FREQUENCY, "aqm_rc_dust_file_fp": aqm_rc_dust_file_fp, diff --git a/ush/job_preamble.sh b/ush/job_preamble.sh index e243f31b37..e9c3683c40 100644 --- a/ush/job_preamble.sh +++ b/ush/job_preamble.sh @@ -25,10 +25,10 @@ export envir="${envir:-${envir_default}}" export NET="${NET:-${NET_default}}" export RUN="${RUN:-${RUN_default}}" export model_ver="${model_ver:-${model_ver_default}}" -export COMROOT="${COMROOT:-${COMROOT_default}}" -export DATAROOT="${DATAROOT:-${DATAROOT_default}}" -export DCOMROOT="${DCOMROOT:-${DCOMROOT_default}}" -export LOGBASEDIR="${LOGBASEDIR:-${LOGBASEDIR_default}}" +export COMROOT="${COMROOT:-${PTMP}/${envir}/com}" +export DATAROOT="${DATAROOT:-${PTMP}/${envir}/tmp}" +export DCOMROOT="${DCOMROOT:-${PTMP}/${envir}/dcom}" +export 
DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" export DBNROOT="${DBNROOT:-${DBNROOT_default}}" export SENDECF="${SENDECF:-${SENDECF_default}}" @@ -41,49 +41,25 @@ export MAILTO="${MAILTO:-${MAILTO_default}}" export MAILCC="${MAILCC:-${MAILCC_default}}" if [ "${RUN_ENVIR}" = "nco" ]; then + [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT if [ "${MACHINE}" = "WCOSS2" ]; then - [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" export COMINgefs="${COMINgefs:-$(compath.py ${envir}/gefs/${gefs_ver})}" else - export COMIN="${COMIN_BASEDIR}/${RUN}.${PDY}/${cyc}" - export COMOUT="${COMOUT_BASEDIR}/${RUN}.${PDY}/${cyc}" - export COMINm1="${COMIN_BASEDIR}/${RUN}.${PDYm1}" + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" + export COMINm1="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDYm1}/${cyc}}" fi else - export COMIN="${COMIN_BASEDIR}/${PDY}${cyc}" - export COMOUT="${COMOUT_BASEDIR}/${PDY}${cyc}" - export COMINm1="${COMIN_BASEDIR}/${RUN}.${PDYm1}" + export COMIN="${EXPTDIR}/${PDY}${cyc}" + export COMOUT="${EXPTDIR}/${PDY}${cyc}" + export COMINm1="${EXPTDIR}/${PDYm1}${cyc}" fi export COMOUTwmo="${COMOUTwmo:-${COMOUT}/wmo}" -export DCOMINbio="${DCOMINbio:-${DCOMINbio_default}}" -export DCOMINdust="${DCOMINdust:-${DCOMINdust_default}}" -export DCOMINcanopy="${DCOMINcanopy:-${DCOMINcanopy_default}}" -export DCOMINfire="${DCOMINfire:-${DCOMINfire_default}}" -export DCOMINchem_lbcs="${DCOMINchem_lbcs:-${DCOMINchem_lbcs_default}}" -export DCOMINgefs="${DCOMINgefs:-${DCOMINgefs_default}}" -export DCOMINpt_src="${DCOMINpt_src:-${DCOMINpt_src_default}}" -export DCOMINairnow="${DCOMINairnow:-${DCOMINairnow_default}}" - -# -#----------------------------------------------------------------------- -# -# Change YES/NO (NCO standards; job card) to TRUE/FALSE (workflow standards) -# for NCO environment variables -# -#----------------------------------------------------------------------- -# -export KEEPDATA=$(boolify "${KEEPDATA}") -export SENDCOM=$(boolify "${SENDCOM}") -export SENDDBN=$(boolify "${SENDDBN}") -export SENDDBN_NTC=$(boolify "${SENDDBN_NTC}") -export SENDECF=$(boolify "${SENDECF}") -export SENDWEB=$(boolify "${SENDWEB}") - # #----------------------------------------------------------------------- # @@ -91,11 +67,12 @@ export SENDWEB=$(boolify "${SENDWEB}") # #----------------------------------------------------------------------- # -if [ $subcyc -eq 0 ]; then - export cycle="t${cyc}z" +if [ ${subcyc} -ne 0 ]; then + export cycle="t${cyc}${subcyc}z" else - export cycle="t${cyc}${subcyc}z" + export cycle="t${cyc}z" fi + if [ "${RUN_ENVIR}" = "nco" ] && [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! 
-z $ENSMEM_INDX ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else diff --git a/ush/machine/hera.yaml b/ush/machine/hera.yaml index 0aadaa6d8b..4d836af317 100644 --- a/ush/machine/hera.yaml +++ b/ush/machine/hera.yaml @@ -19,7 +19,7 @@ platform: RUN_CMD_SERIAL: time RUN_CMD_UTILS: srun --export=ALL RUN_CMD_NEXUS: srun -n ${nprocs} --export=ALL - RUN_CMD_AQMLBC: srun --export=ALL -n ${NUMTS} + RUN_CMD_AQMLBC: srun --export=ALL -n ${numts} SCHED_NATIVE_CMD: --export=NONE SCHED_NATIVE_CMD_HPSS: -n 1 --export=NONE PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' @@ -35,21 +35,14 @@ platform: FIXorg: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/fix/fix_orog FIXsfc: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/fix/fix_sfc_climo FIXshp: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/NaturalEarth + FIXaqm: /scratch2/NAGAPE/epic/SRW-AQM_DATA/fix_aqm + FIXemis: /scratch1/RDARCH/rda-arl-gpu/Barry.Baker/emissions/nexus EXTRN_MDL_DATA_STORES: hpss aws nomads cpl_aqm_parm: - AQM_CONFIG_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/fix/aqm/epa/data - DCOMINbio_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/fix/aqm/bio - DCOMINdust_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/FENGSHA - DCOMINcanopy_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/canopy - DCOMINfire_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/RAVE_fire - DCOMINchem_lbcs_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/LBCS/AQM_NA13km_AM4_v1 - DCOMINgefs_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/GEFS_DATA - DCOMINpt_src_default: /scratch1/RDARCH/rda-arl-gpu/Barry.Baker/emissions/nexus/NEI2016v1/v2023-01-PT - NEXUS_INPUT_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/emissions/nexus - NEXUS_FIX_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/nexus/fix - NEXUS_GFS_SFC_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/GFS_DATA - PT_SRC_BASEDIR: /scratch1/RDARCH/rda-arl-gpu/Barry.Baker/emissions/nexus/NEI2016v1/v2023-01-PT + COMINfire_default: /scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/RAVE_fire + COMINgefs_default: /scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/GEFS_DATA + NEXUS_GFS_SFC_DIR: /scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA rocoto: tasks: diff --git a/ush/machine/wcoss2.yaml b/ush/machine/wcoss2.yaml index 53733959bb..b8c3625dff 100644 --- a/ush/machine/wcoss2.yaml +++ b/ush/machine/wcoss2.yaml @@ -41,21 +41,6 @@ data: RAP: compath.py ${envir}/rap/${rap_ver}/rap.${PDYext} NAM: compath.py ${envir}/nam/${nam_ver}/nam.${PDYext} HRRR: compath.py ${envir}/hrrr/${hrrr_ver}/hrrr.${PDYext}/conus -cpl_aqm_parm: - AQM_CONFIG_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/aqm/epa/data - DCOMINbio_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/aqm/bio - DCOMINdust_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/FENGSHA - DCOMINcanopy_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/canopy - DCOMINfire_default: /lfs/h1/ops/dev/dcom - DCOMINchem_lbcs_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/LBCS/AQM_NA13km_AM4_v1 - DCOMINgefs_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/GEFS_DATA - DCOMINpt_src_default: /lfs/h2/emc/physics/noscrub/Youhua.Tang/nei2016v1-pt/v2023-01-PT - DCOMINairnow_default: /lfs/h1/ops/prod/dcom - COMINbicor: /lfs/h2/emc/physics/noscrub/jianping.huang/Bias_correction/aqmv7.0.81 - COMOUTbicor: /lfs/h2/emc/physics/noscrub/jianping.huang/Bias_correction/aqmv7.0.81 - NEXUS_INPUT_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/nexus_emissions - NEXUS_FIX_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/nexus/fix - NEXUS_GFS_SFC_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/GFS_DATA rocoto: tasks: diff --git a/ush/setup.py b/ush/setup.py index 1d574ec18c..cdea8fde0f 100644 --- a/ush/setup.py 
+++ b/ush/setup.py @@ -1161,49 +1161,15 @@ def get_location(xcs, fmt, expt_cfg): # # ----------------------------------------------------------------------- # - - # These NCO variables need to be set based on the user's specified - # run environment. The default is set in config_defaults for nco. If - # running in community mode, we set these paths to the experiment - # directory. - nco_vars = [ - "opsroot_default", - "comroot_default", - "dataroot_default", - "dcomroot_default", - "comin_basedir", - "comout_basedir", - ] - - nco_config = expt_config["nco"] - if run_envir != "nco": - # Put the variables in config dict. - for nco_var in nco_vars: - nco_config[nco_var.upper()] = exptdir - # Use env variables for NCO variables and create NCO directories workflow_manager = expt_config["platform"].get("WORKFLOW_MANAGER") if run_envir == "nco" and workflow_manager == "rocoto": - for nco_var in nco_vars: - envar = os.environ.get(nco_var) - if envar is not None: - nco_config[nco_var.upper()] = envar - - mkdir_vrfy(f' -p "{nco_config.get("OPSROOT_default")}"') - mkdir_vrfy(f' -p "{nco_config.get("COMROOT_default")}"') - mkdir_vrfy(f' -p "{nco_config.get("DATAROOT_default")}"') - mkdir_vrfy(f' -p "{nco_config.get("DCOMROOT_default")}"') - # Update the rocoto string for the fcst output location if # running an ensemble in nco mode if global_sect["DO_ENSEMBLE"]: rocoto_config["entities"]["FCST_DIR"] = \ - "{{ nco.DATAROOT_default }}/run_fcst_mem#mem#.{{ workflow.WORKFLOW_ID }}_@Y@m@d@H" - - if nco_config["DBNROOT_default"] and workflow_manager == "rocoto": - mkdir_vrfy(f' -p "{nco_config["DBNROOT_default"]}"') + "{{ nco.PTMP }}/{{ nco.envir_default }}/tmp/run_fcst_mem#mem#.{{ workflow.WORKFLOW_ID }}_@Y@m@d@H" - mkdir_vrfy(f' -p "{nco_config.get("LOGBASEDIR_default")}"') # create experiment dir mkdir_vrfy(f' -p "{exptdir}"') From 89b59ec4e18ea6df0f8e4ba8225b8f5b5806f599 Mon Sep 17 00:00:00 2001 From: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com> Date: Wed, 27 Mar 2024 12:38:23 -0400 Subject: [PATCH 12/42] [develop] Update weather model, UPP, and UFS_UTILS hashes (#1050) Updating the ufs-weather-model hash to 8518c2c (March 1), the UPP hash to 945cb2c (January 23), and the UFS_UTILS hash to 57bd832 (February 6). 
This work also required several modifications to allow the updated weather model and UFS_UTILS hashes to work in the SRW:
* Update spack-stack to v1.5.1
* Rename NEMS/nems to UFS/ufs
* Remove ush/set_ozone_param.py (the ozphys scheme was removed from SDFs in the weather model)
* Update path to noahmptable.tbl
* Add two new fields to INPS (MASK_ONLY and MERGE_FILE) for make_orog task
* Make changes to allow for the updated method of finding CRES in chgres_cube
---
 .cicd/scripts/sbatch_srw_ftest.sh | 2 +-
 .cicd/scripts/srw_ftest.sh | 5 +-
 .cicd/scripts/wrapper_srw_ftest.sh | 4 +
 Externals.cfg | 6 +-
 .../CustomizingTheWorkflow/ConfigWorkflow.rst | 16 +--
 modulefiles/build_derecho_intel.lua | 2 +-
 modulefiles/build_gaea_intel.lua | 2 +-
 modulefiles/build_hera_gnu.lua | 2 +-
 modulefiles/build_hera_intel.lua | 2 +-
 modulefiles/build_hercules_intel.lua | 2 +-
 modulefiles/build_jet_intel.lua | 2 +-
 modulefiles/build_noaacloud_intel.lua | 2 +-
 modulefiles/build_orion_intel.lua | 2 +-
 modulefiles/srw_common.lua | 8 +-
 parm/fixed_files_mapping.yaml | 2 +-
 parm/model_configure | 3 +
 parm/{nems.configure => ufs.configure} | 23 +---
 scripts/exregional_make_orog.sh | 2 +
 scripts/exregional_run_fcst.sh | 2 +-
 ..._FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml | 2 +
 ...3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml | 2 +
 ...m_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml | 3 +-
 ...pact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml | 2 +
 ...m_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml | 2 +
 .../config.specify_template_filenames.yaml | 2 +-
 .../test_create_model_configure_file.py | 2 +
 tests/test_python/test_set_ozone_param.py | 45 ------
 ush/config_defaults.yaml | 28 ++--
 ush/create_model_configure_file.py | 2 +
 ...e_file.py => create_ufs_configure_file.py} | 31 ++---
 ush/set_ozone_param.py | 130 ------------------
 ush/setup.py | 40 +-----
 32 files changed, 88 insertions(+), 292 deletions(-)
 rename parm/{nems.configure => ufs.configure} (70%)
 delete mode 100644 tests/test_python/test_set_ozone_param.py
 rename ush/{create_nems_configure_file.py => create_ufs_configure_file.py} (79%)
 delete mode 100644 ush/set_ozone_param.py

diff --git a/.cicd/scripts/sbatch_srw_ftest.sh b/.cicd/scripts/sbatch_srw_ftest.sh
index 5add5368b5..07888d5ae9 100644
--- a/.cicd/scripts/sbatch_srw_ftest.sh
+++ b/.cicd/scripts/sbatch_srw_ftest.sh
@@ -7,7 +7,7 @@
 #SBATCH --account=${SRW_PROJECT}
 #SBATCH --qos=batch
 #SBATCH --nodes=1
-#SBATCH --tasks-per-node=24
+#SBATCH --tasks-per-node=12
 #SBATCH --cpus-per-task=1
 #SBATCH -t 00:30:00
 #SBATCH -o log_wrap.%j.log
diff --git a/.cicd/scripts/srw_ftest.sh b/.cicd/scripts/srw_ftest.sh
index d98d20c831..b77ee767f3 100755
--- a/.cicd/scripts/srw_ftest.sh
+++ b/.cicd/scripts/srw_ftest.sh
@@ -77,6 +77,9 @@ sed "s|^task_get_extrn_lbcs:|task_get_extrn_lbcs:\n EXTRN_MDL_SOURCE_BASEDIR_LB
 # Use staged data for HPSS supported machines
 sed 's|^platform:|platform:\n EXTRN_MDL_DATA_STORES: disk|g' -i ush/config.yaml
+# Set OMP_NUM_THREADS_RUN_FCST to 1 in config.yaml
+sed 's|^task_run_fcst:|task_run_fcst:\n OMP_NUM_THREADS_RUN_FCST: 1|1' -i ush/config.yaml
+
 # Activate the workflow environment ...
 source etc/lmod-setup.sh ${platform,,}
 module use modulefiles
@@ -105,7 +108,7 @@ cp ${workspace}/ush/wrappers/*.sh .
export JOBSdir=${workspace}/jobs export USHdir=${workspace}/ush export OMP_NUM_THREADS=1 -export nprocs=24 +export nprocs=12 [[ -n ${TASKS} ]] || TASKS=( run_make_grid diff --git a/.cicd/scripts/wrapper_srw_ftest.sh b/.cicd/scripts/wrapper_srw_ftest.sh index fabdbb63ef..c6a4d19568 100755 --- a/.cicd/scripts/wrapper_srw_ftest.sh +++ b/.cicd/scripts/wrapper_srw_ftest.sh @@ -34,6 +34,10 @@ if [[ "${SRW_PLATFORM}" == hera ]]; then fi fi +if [[ "${SRW_PLATFORM}" == jet ]]; then + sed -i '15i #SBATCH --partition=xjet' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh +fi + # Call job card and return job_id echo "Running: ${workflow_cmd} -A ${SRW_PROJECT} ${arg_1} ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh" job_id=$(${workflow_cmd} -A ${SRW_PROJECT} ${arg_1} ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh) diff --git a/Externals.cfg b/Externals.cfg index 4b54c71d72..49ea5ffc38 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -3,7 +3,7 @@ protocol = git repo_url = https://github.com/ufs-community/UFS_UTILS # Specify either a branch name or a hash but not both. #branch = develop -hash = dc0e4a6 +hash = 57bd832 local_path = sorc/UFS_UTILS required = True @@ -12,7 +12,7 @@ protocol = git repo_url = https://github.com/ufs-community/ufs-weather-model # Specify either a branch name or a hash but not both. #branch = develop -hash = 020e783 +hash = 8518c2c local_path = sorc/ufs-weather-model required = True @@ -21,7 +21,7 @@ protocol = git repo_url = https://github.com/NOAA-EMC/UPP # Specify either a branch name or a hash but not both. #branch = develop -hash = fae617b +hash = 945cb2c local_path = sorc/UPP required = True diff --git a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst index 10a0bcc4eb..3bfa5bdf7d 100644 --- a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst @@ -386,8 +386,8 @@ Set File Name Parameters ``MODEL_CONFIG_FN``: (Default: "model_configure") Name of a file that contains settings and configurations for the :term:`NUOPC`/:term:`ESMF` main component. In general, users should not set this variable in their configuration file (see :ref:`note `). -``NEMS_CONFIG_FN``: (Default: "nems.configure") - Name of a file that contains information about the various :term:`NEMS` components and their run sequence. In general, users should not set this variable in their configuration file (see :ref:`note `). +``UFS_CONFIG_FN``: (Default: "ufs.configure") + Name of a file that contains information about the various :term:`UFS` components and their run sequence. In general, users should not set this variable in their configuration file (see :ref:`note `). ``AQM_RC_FN``: (Default: "aqm.rc") Name of resource file for NOAA Air Quality Model (AQM). @@ -419,8 +419,8 @@ Set File Path Parameters ``MODEL_CONFIG_TMPL_FP``: (Default: ``'{{ [user.PARMdir, MODEL_CONFIG_FN]|path_join }}'``) Path to the ``MODEL_CONFIG_FN`` file. -``NEMS_CONFIG_TMPL_FP``: (Default: ``'{{ [user.PARMdir, NEMS_CONFIG_FN]|path_join }}'``) - Path to the ``NEMS_CONFIG_FN`` file. +``UFS_CONFIG_TMPL_FP``: (Default: ``'{{ [user.PARMdir, UFS_CONFIG_FN]|path_join }}'``) + Path to the ``UFS_CONFIG_FN`` file. ``AQM_RC_TMPL_FP``: (Default: ``'{{ [user.PARMdir, AQM_RC_TMPL_FN]|path_join }}'``) Path to the ``AQM_RC_TMPL_FN`` file. 
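The Externals.cfg hunk above pins each component to a fixed hash. When debugging a build against this revision of the app, it can be worth confirming that the checked-out components actually match those pins; a minimal sketch (component paths and hashes taken from the hunk above, the verification loop itself is illustrative and not part of this PR):

    # Compare each checked-out component against the hash pinned in Externals.cfg.
    for pair in "sorc/UFS_UTILS:57bd832" \
                "sorc/ufs-weather-model:8518c2c" \
                "sorc/UPP:945cb2c"; do
      dir=${pair%%:*}
      want=${pair##*:}
      have=$(git -C "$dir" rev-parse --short=7 HEAD 2>/dev/null) || have="(not checked out)"
      [ "$have" = "$want" ] || echo "WARNING: $dir is at $have, expected $want"
    done
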
@@ -437,8 +437,8 @@ This section contains files and paths to files that are staged in the experiment ``FIELD_TABLE_FP``: (Default: ``'{{ [EXPTDIR, FIELD_TABLE_FN]|path_join }}'``) Path to the field table in the experiment directory. (The field table specifies tracers that the forecast model reads in.) -``NEMS_CONFIG_FP``: (Default: ``'{{ [EXPTDIR, NEMS_CONFIG_FN]|path_join }}'``) - Path to the ``NEMS_CONFIG_FN`` file in the experiment directory. +``UFS_CONFIG_FP``: (Default: ``'{{ [EXPTDIR, UFS_CONFIG_FN]|path_join }}'``) + Path to the ``UFS_CONFIG_FN`` file in the experiment directory. ``FV3_NML_FP``: (Default: ``'{{ [EXPTDIR, FV3_NML_FN]|path_join }}'``) Path to the ``FV3_NML_FN`` file in the experiment directory. @@ -538,7 +538,7 @@ CCPP Parameter Field Dictionary Parameters ------------------------------ -``FIELD_DICT_FN``: (Default: "fd_nems.yaml") +``FIELD_DICT_FN``: (Default: "fd_ufs.yaml") The name of the field dictionary file. This file is a community-based dictionary for shared coupling fields and is automatically generated by the :term:`NUOPC` Layer. ``FIELD_DICT_IN_UWM_FP``: (Default: ``'{{ [user.UFS_WTHR_MDL_DIR, "tests", "parm", FIELD_DICT_FN]|path_join }}'``) @@ -1109,7 +1109,7 @@ For each workflow task, certain parameter values must be passed to the job sched For more information, see the `Intel Development Reference Guide `__. ``OMP_NUM_THREADS_RUN_FCST``: (Default: 1) - The number of OpenMP threads to use for parallel regions. Corresponds to the ``ATM_omp_num_threads`` value in ``nems.configure``. + The number of OpenMP threads to use for parallel regions. Corresponds to the ``atmos_nthreads`` value in ``model_configure``. ``OMP_STACKSIZE_RUN_FCST``: (Default: "512m") Controls the size of the stack for threads created by the OpenMP implementation. 
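A rough sketch of how the two settings documented above typically reach a forecast run (only the variable names come from this PR; the srun invocation and the fallback defaults shown are illustrative):

    # OMP_NUM_THREADS_RUN_FCST is rendered into model_configure as
    # atmos_nthreads; both settings also surface in the job environment.
    export OMP_NUM_THREADS=${OMP_NUM_THREADS_RUN_FCST:-1}
    export OMP_STACKSIZE=${OMP_STACKSIZE_RUN_FCST:-512m}
    srun --export=ALL --cpus-per-task=${OMP_NUM_THREADS} ./ufs_model
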
diff --git a/modulefiles/build_derecho_intel.lua b/modulefiles/build_derecho_intel.lua index d434e8ecd3..ac98c39e53 100644 --- a/modulefiles/build_derecho_intel.lua +++ b/modulefiles/build_derecho_intel.lua @@ -6,7 +6,7 @@ the CISL machine Derecho (Cray) using Intel@2021.10.0 whatis([===[Loads libraries needed for building the UFS SRW App on Derecho ]===]) prepend_path("MODULEPATH","/lustre/desc1/scratch/epicufsrt/contrib/modulefiles_extra") -prepend_path("MODULEPATH", "/glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.5.0/envs/unified-env/install/modulefiles/Core") +prepend_path("MODULEPATH", "/glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core") load(pathJoin("stack-intel", os.getenv("stack_intel_ver") or "2021.10.0")) load(pathJoin("stack-cray-mpich", os.getenv("stack_cray_mpich_ver") or "8.1.25")) diff --git a/modulefiles/build_gaea_intel.lua b/modulefiles/build_gaea_intel.lua index 9c21f685da..0eca20b5e1 100644 --- a/modulefiles/build_gaea_intel.lua +++ b/modulefiles/build_gaea_intel.lua @@ -5,7 +5,7 @@ the NOAA RDHPC machine Gaea C5 using Intel-2023.1.0 whatis([===[Loads libraries needed for building the UFS SRW App on Gaea C5 ]===]) -prepend_path("MODULEPATH","/ncrc/proj/epic/spack-stack/spack-stack-1.5.0/envs/unified-env/install/modulefiles/Core") +prepend_path("MODULEPATH","/ncrc/proj/epic/spack-stack/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core") stack_intel_ver=os.getenv("stack_intel_ver") or "2023.1.0" load(pathJoin("stack-intel", stack_intel_ver)) diff --git a/modulefiles/build_hera_gnu.lua b/modulefiles/build_hera_gnu.lua index d5f78f397b..5355895da9 100644 --- a/modulefiles/build_hera_gnu.lua +++ b/modulefiles/build_hera_gnu.lua @@ -5,7 +5,7 @@ the NOAA RDHPC machine Hera using GNU 9.2.0 whatis([===[Loads libraries needed for building the UFS SRW App on Hera using GNU 9.2.0 ]===]) -prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env-rocky8/install/modulefiles/Core") +prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.1/envs/unified-env-rocky8/install/modulefiles/Core") prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulefiles") load("stack-gcc/9.2.0") diff --git a/modulefiles/build_hera_intel.lua b/modulefiles/build_hera_intel.lua index 061feef67b..ee11e4a386 100644 --- a/modulefiles/build_hera_intel.lua +++ b/modulefiles/build_hera_intel.lua @@ -8,7 +8,7 @@ whatis([===[Loads libraries needed for building the UFS SRW App on Hera ]===]) prepend_path("MODULEPATH","/contrib/sutils/modulefiles") load("sutils") -prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env-rocky8/install/modulefiles/Core") +prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.1/envs/unified-env-rocky8/install/modulefiles/Core") prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulefiles") stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0" diff --git a/modulefiles/build_hercules_intel.lua b/modulefiles/build_hercules_intel.lua index 1cb402b8a6..cec2a3a30e 100644 --- a/modulefiles/build_hercules_intel.lua +++ b/modulefiles/build_hercules_intel.lua @@ -5,7 +5,7 @@ the MSU machine Hercules using intel-oneapi-compilers/2022.2.1 whatis([===[Loads libraries needed for building the UFS SRW App on Hercules ]===]) -prepend_path("MODULEPATH", 
"/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.5.0/envs/unified-env/install/modulefiles/Core") +prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core") prepend_path("MODULEPATH", "/work/noaa/da/role-da/spack-stack/modulefiles") load("stack-intel/2021.9.0") diff --git a/modulefiles/build_jet_intel.lua b/modulefiles/build_jet_intel.lua index eb2893d3cc..925fef3853 100644 --- a/modulefiles/build_jet_intel.lua +++ b/modulefiles/build_jet_intel.lua @@ -5,7 +5,7 @@ the NOAA RDHPC machine Jet using Intel-2021.5.0 whatis([===[Loads libraries needed for building the UFS SRW App on Jet ]===]) -prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env-rocky8/install/modulefiles/Core") +prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.5.1/envs/unified-env-rocky8/install/modulefiles/Core") prepend_path("MODULEPATH", "/lfs4/HFIP/hfv3gfs/spack-stack/modulefiles") load("stack-intel/2021.5.0") diff --git a/modulefiles/build_noaacloud_intel.lua b/modulefiles/build_noaacloud_intel.lua index 50f1aec9ab..0b6a9c1ca4 100644 --- a/modulefiles/build_noaacloud_intel.lua +++ b/modulefiles/build_noaacloud_intel.lua @@ -5,7 +5,7 @@ the NOAA cloud using Intel-oneapi whatis([===[Loads libraries needed for building the UFS SRW App on NOAA cloud ]===]) -prepend_path("MODULEPATH", "/contrib/spack-stack/spack-stack-1.5.0/envs/unified-env/install/modulefiles/Core") +prepend_path("MODULEPATH", "/contrib/spack-stack/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core") prepend_path("MODULEPATH", "/apps/modules/modulefiles") prepend_path("PATH", "/contrib/EPIC/bin") load("stack-intel") diff --git a/modulefiles/build_orion_intel.lua b/modulefiles/build_orion_intel.lua index 241b658c88..d3e777d1dc 100644 --- a/modulefiles/build_orion_intel.lua +++ b/modulefiles/build_orion_intel.lua @@ -5,7 +5,7 @@ the MSU machine Orion using Intel-2022.1.2 whatis([===[Loads libraries needed for building the UFS SRW App on Orion ]===]) -prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.5.0/envs/unified-env/install/modulefiles/Core") +prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core") prepend_path("MODULEPATH", "/work/noaa/da/role-da/spack-stack/modulefiles") load("stack-intel/2022.0.2") diff --git a/modulefiles/srw_common.lua b/modulefiles/srw_common.lua index 2bcbc1f5f7..79c67283f9 100644 --- a/modulefiles/srw_common.lua +++ b/modulefiles/srw_common.lua @@ -5,8 +5,8 @@ load("libpng/1.6.37") load("netcdf-c/4.9.2") load("netcdf-fortran/4.6.0") load("parallelio/2.5.10") -load("esmf/8.4.2") -load("fms/2023.01") +load("esmf/8.5.0") +load("fms/2023.02.01") load("bacio/2.4.1") load("crtm/2.4.0") @@ -16,8 +16,8 @@ load("ip/4.3.0") load("sp/2.3.3") load("w3emc/2.10.0") -load("gftl-shared/1.5.0") -load("mapl/2.35.2-esmf-8.4.2") +load("gftl-shared/1.6.1") +load("mapl/2.40.3-esmf-8.5.0") load("nemsio/2.5.4") load("sfcio/1.4.1") diff --git a/parm/fixed_files_mapping.yaml b/parm/fixed_files_mapping.yaml index 54ddd41a81..49d3191de5 100644 --- a/parm/fixed_files_mapping.yaml +++ b/parm/fixed_files_mapping.yaml @@ -175,7 +175,7 @@ fixed_files: "global_tg3clim.2.6x1.5.grb | global_tg3clim.2.6x1.5.grb", "sfc_emissivity_idx.txt | global_sfc_emissivity_idx.txt", "solarconstant_noaa_an.txt | global_solarconstant_noaa_an.txt", - "global_o3prdlos.f77 | " + 
"global_o3prdlos.f77 | ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77" ] # diff --git a/parm/model_configure b/parm/model_configure index d22adf3f3a..aeb45f4719 100644 --- a/parm/model_configure +++ b/parm/model_configure @@ -1,3 +1,5 @@ +total_member: 1 +PE_MEMBER01: {{ PE_MEMBER01 }} start_year: {{ start_year }} start_month: {{ start_month }} start_day: {{ start_day }} @@ -11,6 +13,7 @@ ENS_SPS: .false. dt_atmos: {{ dt_atmos }} calendar: 'julian' memuse_verbose: .false. +atmos_nthreads: {{ atmos_nthreads }} restart_interval: {{ restart_interval }} output_1st_tstep_rst: .false. write_dopost: {{ write_dopost }} diff --git a/parm/nems.configure b/parm/ufs.configure similarity index 70% rename from parm/nems.configure rename to parm/ufs.configure index 14d9503c47..d90b7447f4 100644 --- a/parm/nems.configure +++ b/parm/ufs.configure @@ -1,5 +1,5 @@ ############################################# -#### NEMS Run-Time Configuration File ##### +#### UFS Run-Time Configuration File ##### ############################################# # ESMF # @@ -45,21 +45,8 @@ runSeq:: {% else %} # EARTH # EARTH_component_list: ATM -EARTH_attributes:: - Verbosity = 0 -:: - -# ATM # -ATM_model: fv3 -ATM_petlist_bounds: 0 {{ pe_member01_m1 }} -ATM_omp_num_threads: {{ atm_omp_num_threads }} -ATM_attributes:: - Verbosity = 0 - Diagnostic = 0 -:: - -# Run Sequence # -runSeq:: - ATM -:: + ATM_model: fv3 + runSeq:: + ATM + :: {% endif %} diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 4fcf34a4ad..47430a802d 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -186,6 +186,8 @@ echo $mtnres $lonb $latb $jcap $NR $NF1 $NF2 $efac $blat > "${input_redirect_fn} # echo "\"${grid_fp}\"" >> "${input_redirect_fn}" echo "\"$orogfile\"" >> "${input_redirect_fn}" +echo ".false." >> "${input_redirect_fn}" #MASK_ONLY +echo "none" >> "${input_redirect_fn}" #MERGE_FILE cat "${input_redirect_fn}" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 0013fad47d..9e6dc38584 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -696,7 +696,7 @@ fi # #----------------------------------------------------------------------- # -python3 $USHdir/create_nems_configure_file.py \ +python3 $USHdir/create_ufs_configure_file.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ --run-dir "${DATA}" export err=$? 
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml index f4c40bf722..de456cea73 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml @@ -20,3 +20,5 @@ task_get_extrn_lbcs: EXTRN_MDL_NAME_LBCS: FV3GFS LBC_SPEC_INTVL_HRS: 3 USE_USER_STAGED_EXTRN_FILES: true +task_run_fcst: + OMP_NUM_THREADS_RUN_FCST: 3 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml index 6d4dbc3b33..4a340185f3 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml @@ -19,3 +19,5 @@ task_get_extrn_lbcs: EXTRN_MDL_NAME_LBCS: FV3GFS LBC_SPEC_INTVL_HRS: 3 USE_USER_STAGED_EXTRN_FILES: true +task_run_fcst: + OMP_NUM_THREADS_RUN_FCST: 3 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml index dd5f5a464a..2e4f1dc22f 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml @@ -3,11 +3,10 @@ metadata: This test is to ensure that the workflow running in community mode completes successfully on the RRFS_CONUScompact_25km grid using the RRFS_v1beta physics suite with ICs derived from the HRRR and LBCs derived from the RAP. - It also tests the "DOT_OR_USCORE" option and enables offline UPP 2D decomposition. + It also enables offline UPP 2D decomposition. user: RUN_ENVIR: community workflow: - DOT_OR_USCORE: . 
CCPP_PHYS_SUITE: FV3_RRFS_v1beta PREDEF_GRID_NAME: RRFS_CONUScompact_25km DATE_FIRST_CYCL: '2020081000' diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml index b00a24ae84..35be12a1ee 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml @@ -23,3 +23,5 @@ task_get_extrn_lbcs: USE_USER_STAGED_EXTRN_FILES: true EXTRN_MDL_FILES_LBCS: - '{yy}{jjj}{hh}00{fcst_hr:02d}00' +task_run_fcst: + OMP_NUM_THREADS_RUN_FCST: 3 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml index 44dfec5e75..1265fa8e0c 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml @@ -24,3 +24,5 @@ task_get_extrn_lbcs: USE_USER_STAGED_EXTRN_FILES: true EXTRN_MDL_FILES_LBCS: - '{yy}{jjj}{hh}00{fcst_hr:02d}00' +task_run_fcst: + OMP_NUM_THREADS_RUN_FCST: 3 diff --git a/tests/WE2E/test_configs/wflow_features/config.specify_template_filenames.yaml b/tests/WE2E/test_configs/wflow_features/config.specify_template_filenames.yaml index 2c39bc388e..996ea2e7d5 100644 --- a/tests/WE2E/test_configs/wflow_features/config.specify_template_filenames.yaml +++ b/tests/WE2E/test_configs/wflow_features/config.specify_template_filenames.yaml @@ -9,7 +9,7 @@ workflow: DIAG_TABLE_TMPL_FN: diag_table.FV3_GFS_v15p2 FIELD_TABLE_TMPL_FN: field_table.FV3_GFS_v15p2 MODEL_CONFIG_FN: model_configure - NEMS_CONFIG_FN: nems.configure + UFS_CONFIG_FN: ufs.configure CCPP_PHYS_SUITE: FV3_GFS_v15p2 PREDEF_GRID_NAME: RRFS_CONUS_25km DATE_FIRST_CYCL: '2019070100' diff --git a/tests/test_python/test_create_model_configure_file.py b/tests/test_python/test_create_model_configure_file.py index 9475028505..d5aea79ed8 100644 --- a/tests/test_python/test_create_model_configure_file.py +++ b/tests/test_python/test_create_model_configure_file.py @@ -43,9 +43,11 @@ def setUp(self): set_env_var("USHdir", USHdir) set_env_var("MODEL_CONFIG_FN", MODEL_CONFIG_FN) set_env_var("MODEL_CONFIG_TMPL_FP", MODEL_CONFIG_TMPL_FP) + set_env_var("PE_MEMBER01", 24) set_env_var("FCST_LEN_HRS", 72) set_env_var("FHROT", 0) set_env_var("DT_ATMOS", 1) + set_env_var("OMP_NUM_THREADS_RUN_FCST", 1) set_env_var("RESTART_INTERVAL", 4) set_env_var("ITASKS", 1) diff --git a/tests/test_python/test_set_ozone_param.py b/tests/test_python/test_set_ozone_param.py deleted file mode 100644 index 1d0e6d6aa7..0000000000 --- a/tests/test_python/test_set_ozone_param.py +++ /dev/null @@ -1,45 +0,0 @@ -""" Tests for set_ozone_param.py """ - -#pylint: disable=invalid-name - -import os -import unittest - -from set_ozone_param import set_ozone_param - -class Testing(unittest.TestCase): - """ Define the tests """ - def test_set_ozone_param(self): - """ Test that when the CCPP phyiscs suite XML is provided that - activates ozone, the expected 
ozone parameter is returned""" - test_dir = os.path.dirname(os.path.abspath(__file__)) - USHdir = os.path.join(test_dir, "..", "..", "ush") - ozone_param, _, _ = set_ozone_param( - os.path.join(USHdir, "test_data", "suite_FV3_GSD_SAR.xml"), - self.CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING, - ) - self.assertEqual("ozphys_2015", ozone_param) - - def setUp(self): - self.CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = [ - "aerosol.dat | global_climaeropac_global.txt", - "co2historicaldata_2010.txt | fix_co2_proj/global_co2historicaldata_2010.txt", - "co2historicaldata_2011.txt | fix_co2_proj/global_co2historicaldata_2011.txt", - "co2historicaldata_2012.txt | fix_co2_proj/global_co2historicaldata_2012.txt", - "co2historicaldata_2013.txt | fix_co2_proj/global_co2historicaldata_2013.txt", - "co2historicaldata_2014.txt | fix_co2_proj/global_co2historicaldata_2014.txt", - "co2historicaldata_2015.txt | fix_co2_proj/global_co2historicaldata_2015.txt", - "co2historicaldata_2016.txt | fix_co2_proj/global_co2historicaldata_2016.txt", - "co2historicaldata_2017.txt | fix_co2_proj/global_co2historicaldata_2017.txt", - "co2historicaldata_2018.txt | fix_co2_proj/global_co2historicaldata_2018.txt", - "co2historicaldata_2019.txt | fix_co2_proj/global_co2historicaldata_2019.txt", - "co2historicaldata_2020.txt | fix_co2_proj/global_co2historicaldata_2020.txt", - "co2historicaldata_2021.txt | fix_co2_proj/global_co2historicaldata_2021.txt", - "co2historicaldata_glob.txt | global_co2historicaldata_glob.txt", - "co2monthlycyc.txt | co2monthlycyc.txt", - "global_h2oprdlos.f77 | global_h2o_pltc.f77", - "global_zorclim.1x1.grb | global_zorclim.1x1.grb", - "sfc_emissivity_idx.txt | global_sfc_emissivity_idx.txt", - "solarconstant_noaa_an.txt | global_solarconstant_noaa_an.txt", - "global_o3prdlos.f77 | ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77", - ] diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 6e7823c5d2..6a403754cb 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -610,9 +610,9 @@ workflow: # NUOPC/ESMF main component (ufs-weather-model: model_config). Its default # value is the name of the file that the ufs weather model expects to read in. # - # NEMS_CONFIG_FN: - # Name of a template file that contains information about the various NEMS - # components and their run sequence (ufs-weather-model: nems.configure). + # UFS_CONFIG_FN: + # Name of a template file that contains information about the various UFS + # components and their run sequence (ufs-weather-model: ufs.configure). # Its default value is the name of the file that the ufs weather model expects # to read in. # @@ -641,7 +641,7 @@ workflow: DIAG_TABLE_TMPL_FN: 'diag_table.{{ CCPP_PHYS_SUITE }}' FIELD_TABLE_TMPL_FN: 'field_table.{{ CCPP_PHYS_SUITE }}' MODEL_CONFIG_FN: "model_configure" - NEMS_CONFIG_FN: "nems.configure" + UFS_CONFIG_FN: "ufs.configure" AQM_RC_FN: "aqm.rc" AQM_RC_TMPL_FN: "aqm.rc" @@ -668,8 +668,8 @@ workflow: # MODEL_CONFIG_TMPL_FP: # Path to the MODEL_CONFIG_FN file. # - # NEMS_CONFIG_TMPL_FP: - # Path to the NEMS_CONFIG_FN file. + # UFS_CONFIG_TMPL_FP: + # Path to the UFS_CONFIG_FN file. # # AQM_RC_TMPL_FP: # Path to the AQM_RC_TMPL_FN file. 
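For context on the test deleted above: ush/set_ozone_param.py (removed later in this patch) inspected the CCPP suite definition file for an ozone scheme tag and picked the matching FIXgsm ozone file; with the ozphys variant gone from the SDFs, parm/fixed_files_mapping.yaml now pins the 2015 file directly. A grep-based sketch of the retired check (the real code used the project's XML helpers; the SDF path here is illustrative):

    # Rough shell equivalent of the removed set_ozone_param logic.
    sdf="suite_FV3_GSD_SAR.xml"   # illustrative SDF path
    if grep -q "<scheme>ozphys_2015</scheme>" "$sdf"; then
      fixgsm_ozone_fn="ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77"
    elif grep -q "<scheme>ozphys</scheme>" "$sdf"; then
      fixgsm_ozone_fn="global_o3prdlos.f77"
    else
      echo "FATAL: no known ozone scheme in $sdf" >&2
      exit 1
    fi
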
@@ -684,7 +684,7 @@ workflow: DIAG_TABLE_TMPL_FP: '{{ [user.PARMdir, DIAG_TABLE_TMPL_FN]|path_join }}' FIELD_TABLE_TMPL_FP: '{{ [user.PARMdir, FIELD_TABLE_TMPL_FN]|path_join }}' MODEL_CONFIG_TMPL_FP: '{{ [user.PARMdir, MODEL_CONFIG_FN]|path_join }}' - NEMS_CONFIG_TMPL_FP: '{{ [user.PARMdir, NEMS_CONFIG_FN]|path_join }}' + UFS_CONFIG_TMPL_FP: '{{ [user.PARMdir, UFS_CONFIG_FN]|path_join }}' AQM_RC_TMPL_FP: '{{ [user.PARMdir, AQM_RC_TMPL_FN]|path_join }}' # @@ -697,8 +697,8 @@ workflow: # FIELD_TABLE_FP: # Path to the field table in the experiment directory. # - # NEMS_CONFIG_FP: - # Path to the NEMS_CONFIG_FN file in the experiment directory. + # UFS_CONFIG_FP: + # Path to the UFS_CONFIG_FN file in the experiment directory. # # FV3_NML_FP: # Path to the FV3_NML_FN file in the experiment directory. @@ -759,7 +759,7 @@ workflow: # DATA_TABLE_FP: '{{ [EXPTDIR, DATA_TABLE_FN]|path_join }}' FIELD_TABLE_FP: '{{ [EXPTDIR, FIELD_TABLE_FN]|path_join }}' - NEMS_CONFIG_FP: '{{ [EXPTDIR, NEMS_CONFIG_FN]|path_join }}' + UFS_CONFIG_FP: '{{ [EXPTDIR, UFS_CONFIG_FN]|path_join }}' FV3_NML_FP: '{{ [EXPTDIR, FV3_NML_FN]|path_join }}' FV3_NML_STOCH_FP: '{{ [EXPTDIR, [FV3_NML_FN, "_stoch"]|join ]|path_join }}' @@ -846,7 +846,7 @@ workflow: CCPP_PHYS_SUITE_FN: 'suite_{{ CCPP_PHYS_SUITE }}.xml' CCPP_PHYS_SUITE_IN_CCPP_FP: '{{ [user.UFS_WTHR_MDL_DIR, "FV3", "ccpp", "suites", CCPP_PHYS_SUITE_FN] |path_join }}' CCPP_PHYS_SUITE_FP: '{{ [workflow.EXPTDIR, CCPP_PHYS_SUITE_FN]|path_join }}' - CCPP_PHYS_DIR: '{{ [user.UFS_WTHR_MDL_DIR, "FV3", "ccpp", "physics", "physics"] |path_join }}' + CCPP_PHYS_DIR: '{{ [user.UFS_WTHR_MDL_DIR, "FV3", "ccpp", "physics", "physics", "SFC_Models", "Land", "Noahmp"] |path_join }}' # #----------------------------------------------------------------------- # @@ -866,7 +866,7 @@ workflow: # #----------------------------------------------------------------------- # - FIELD_DICT_FN: "fd_nems.yaml" + FIELD_DICT_FN: "fd_ufs.yaml" FIELD_DICT_IN_UWM_FP: '{{ [user.UFS_WTHR_MDL_DIR, "tests", "parm", FIELD_DICT_FN]|path_join }}' FIELD_DICT_FP: '{{ [workflow.EXPTDIR, FIELD_DICT_FN]|path_join }}' # @@ -1728,8 +1728,8 @@ task_run_fcst: #----------------------------------------------------------------------- # KMP_AFFINITY_RUN_FCST: "scatter" - OMP_NUM_THREADS_RUN_FCST: 1 # ATM_omp_num_threads in nems.configure - OMP_STACKSIZE_RUN_FCST: "512m" + OMP_NUM_THREADS_RUN_FCST: 2 # atmos_nthreads in model_configure + OMP_STACKSIZE_RUN_FCST: "1024m" # #----------------------------------------------------------------------- # diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index c2778f1be5..cd10ac404e 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -71,6 +71,7 @@ def create_model_configure_file( # ----------------------------------------------------------------------- # settings = { + "PE_MEMBER01": PE_MEMBER01, "start_year": cdate.year, "start_month": cdate.month, "start_day": cdate.day, @@ -78,6 +79,7 @@ def create_model_configure_file( "nhours_fcst": fcst_len_hrs, "fhrot": fhrot, "dt_atmos": DT_ATMOS, + "atmos_nthreads": OMP_NUM_THREADS_RUN_FCST, "restart_interval": RESTART_INTERVAL, "itasks": ITASKS, "write_dopost": f".{lowercase(str(WRITE_DOPOST))}.", diff --git a/ush/create_nems_configure_file.py b/ush/create_ufs_configure_file.py similarity index 79% rename from ush/create_nems_configure_file.py rename to ush/create_ufs_configure_file.py index a6ba1cbd6b..03de3e24c7 100644 --- a/ush/create_nems_configure_file.py +++ 
b/ush/create_ufs_configure_file.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 """ -Function to create a NEMS configuration file for the FV3 forecast +Function to create a UFS configuration file for the FV3 forecast model(s) from a template. """ @@ -21,8 +21,8 @@ print_input_args, ) -def create_nems_configure_file(run_dir): - """ Creates a nems configuration file in the specified +def create_ufs_configure_file(run_dir): + """ Creates a ufs configuration file in the specified run directory Args: @@ -41,19 +41,18 @@ def create_nems_configure_file(run_dir): # #----------------------------------------------------------------------- # - # Create a NEMS configuration file in the specified run directory. + # Create a UFS configuration file in the specified run directory. # #----------------------------------------------------------------------- # print_info_msg(f''' - Creating a nems.configure file (\"{NEMS_CONFIG_FN}\") in the specified + Creating a ufs.configure file (\"{UFS_CONFIG_FN}\") in the specified run directory (run_dir): run_dir = \"{run_dir}\"''', verbose=VERBOSE) # # Set output file path # - nems_config_fp = os.path.join(run_dir, NEMS_CONFIG_FN) - pe_member01_m1 = str(int(PE_MEMBER01)-1) + ufs_config_fp = os.path.join(run_dir, UFS_CONFIG_FN) # #----------------------------------------------------------------------- # @@ -66,16 +65,14 @@ def create_nems_configure_file(run_dir): settings = { "dt_atmos": DT_ATMOS, "print_esmf": PRINT_ESMF, - "cpl_aqm": CPL_AQM, - "pe_member01_m1": pe_member01_m1, - "atm_omp_num_threads": OMP_NUM_THREADS_RUN_FCST, + "cpl_aqm": CPL_AQM } settings_str = cfg_to_yaml_str(settings) print_info_msg( dedent( f""" - The variable \"settings\" specifying values to be used in the \"{NEMS_CONFIG_FN}\" + The variable \"settings\" specifying values to be used in the \"{UFS_CONFIG_FN}\" file has been set as follows:\n settings =\n\n""" ) @@ -85,7 +82,7 @@ def create_nems_configure_file(run_dir): # #----------------------------------------------------------------------- # - # Call a python script to generate the experiment's actual NEMS_CONFIG_FN + # Call a python script to generate the experiment's actual UFS_CONFIG_FN # file from the template file. # #----------------------------------------------------------------------- @@ -93,14 +90,14 @@ def create_nems_configure_file(run_dir): # Store the settings in a temporary file with tempfile.NamedTemporaryFile(dir="./", mode="w+t", - prefix="nems_config_settings", + prefix="ufs_config_settings", suffix=".yaml") as tmpfile: tmpfile.write(settings_str) tmpfile.seek(0) cmd = " ".join(["uw template render", - "-i", NEMS_CONFIG_TMPL_FP, - "-o", nems_config_fp, + "-i", UFS_CONFIG_TMPL_FP, + "-o", ufs_config_fp, "-v", "--values-file", tmpfile.name, ] @@ -124,7 +121,7 @@ def create_nems_configure_file(run_dir): def parse_args(argv): """ Parse command line arguments""" parser = argparse.ArgumentParser( - description='Creates NEMS configuration file.' + description='Creates UFS configuration file.' 
) parser.add_argument("-r", "--run-dir", @@ -144,6 +141,6 @@ def parse_args(argv): cfg = load_shell_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) - create_nems_configure_file( + create_ufs_configure_file( run_dir=args.run_dir, ) diff --git a/ush/set_ozone_param.py b/ush/set_ozone_param.py deleted file mode 100644 index 14a57b3fe9..0000000000 --- a/ush/set_ozone_param.py +++ /dev/null @@ -1,130 +0,0 @@ -#!/usr/bin/env python3 - -import copy -import os -from textwrap import dedent - -from python_utils import ( - log_info, - list_to_str, - print_input_args, - load_xml_file, - has_tag_with_value, - find_pattern_in_str, -) - - -def set_ozone_param(ccpp_phys_suite_fp, link_mappings): - """Function that does the following: - (1) Determines the ozone parameterization being used by checking in the - CCPP physics suite XML. - - (2) Sets the name of the global ozone production/loss file in the FIXgsm - FIXgsm system directory to copy to the experiment's FIXam directory. - - (3) Updates the symlink for the ozone file provided in link_mappings - list to include the name of global ozone production/loss file. - - Args: - ccpp_phys_suite_fp: full path to CCPP physics suite - link_mappings: list of mappings between symlinks and their - target files for this experiment - Returns: - ozone_param: a string - fixgsm_ozone_fn: a path to a fix file that should be used with - this experiment - ozone_link_mappings: a list of mappings for the files needed for - this experiment - - """ - - print_input_args(locals()) - - # - # ----------------------------------------------------------------------- - # - # Get the name of the ozone parameterization being used. There are two - # possible ozone parameterizations: - # - # (1) A parameterization developed/published in 2015. Here, we refer to - # this as the 2015 parameterization. If this is being used, then we - # set the variable ozone_param to the string "ozphys_2015". - # - # (2) A parameterization developed/published sometime after 2015. Here, - # we refer to this as the after-2015 parameterization. If this is - # being used, then we set the variable ozone_param to the string - # "ozphys". - # - # We check the CCPP physics suite definition file (SDF) to determine the - # parameterization being used. If this file contains the line - # - # ozphys_2015 - # - # then the 2015 parameterization is being used. If it instead contains - # the line - # - # ozphys - # - # then the after-2015 parameterization is being used. (The SDF should - # contain exactly one of these lines; not both nor neither; we check for - # this.) - # - # ----------------------------------------------------------------------- - # - tree = load_xml_file(ccpp_phys_suite_fp) - ozone_param = "" - if has_tag_with_value(tree, "scheme", "ozphys_2015"): - fixgsm_ozone_fn = "ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77" - ozone_param = "ozphys_2015" - elif has_tag_with_value(tree, "scheme", "ozphys"): - fixgsm_ozone_fn = "global_o3prdlos.f77" - ozone_param = "ozphys" - else: - raise KeyError( - f"Unknown or no ozone parameterization specified in the " - "CCPP physics suite file '{ccpp_phys_suite_fp}'" - ) - # - # ----------------------------------------------------------------------- - # - # Set the element in the array CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING that - # specifies the mapping between the symlink for the ozone production/loss - # file that must be created in each cycle directory and its target in the - # FIXam directory. 
The name of the symlink is already in the array, but - # the target is not because it depends on the ozone parameterization that - # the physics suite uses. Since we determined the ozone parameterization - # above, we now set the target of the symlink accordingly. - - # - # ----------------------------------------------------------------------- - # - # Set the mapping between the symlink and the target file we just - # found. The link name is already in the list, but the target file - # is not. - # - # ----------------------------------------------------------------------- - # - - ozone_symlink = "global_o3prdlos.f77" - fixgsm_ozone_fn_is_set = False - - ozone_link_mappings = copy.deepcopy(link_mappings) - for i, mapping in enumerate(ozone_link_mappings): - symlink = mapping.split("|")[0] - if symlink.strip() == ozone_symlink: - ozone_link_mappings[i] = f"{symlink}| {fixgsm_ozone_fn}" - fixgsm_ozone_fn_is_set = True - break - - # Make sure the list has been updated - if not fixgsm_ozone_fn_is_set: - - raise Exception( - f""" - Unable to set name of the ozone production/loss file in the FIXgsm directory - in the array that specifies the mapping between the symlinks that need to - be created in the cycle directories and the files in the FIXgsm directory: - fixgsm_ozone_fn_is_set = '{fixgsm_ozone_fn_is_set}'""" - ) - - return ozone_param, fixgsm_ozone_fn, ozone_link_mappings diff --git a/ush/setup.py b/ush/setup.py index cdea8fde0f..0511653fa2 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -39,7 +39,6 @@ from set_cycle_dates import set_cycle_dates from set_predef_grid_params import set_predef_grid_params -from set_ozone_param import set_ozone_param from set_gridparams_ESGgrid import set_gridparams_ESGgrid from set_gridparams_GFDLgrid import set_gridparams_GFDLgrid from link_fix import link_fix @@ -1230,43 +1229,6 @@ def get_location(xcs, fmt, expt_cfg): FIELD_DICT_IN_UWM_FP = '{field_dict_in_uwm_fp}'""" ) - fixed_files = expt_config["fixed_files"] - # Set the appropriate ozone production/loss file paths and symlinks - ozone_param, fixgsm_ozone_fn, ozone_link_mappings = set_ozone_param( - ccpp_phys_suite_in_ccpp_fp, - fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"], - ) - - # Reset the dummy value saved in the last list item to the ozone - # file name - fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"][-1] = fixgsm_ozone_fn - - # Reset the experiment config list with the update list - fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"] = ozone_link_mappings - - log_info( - f""" - The ozone parameter used for this experiment is {ozone_param}. - """ - ) - - log_info( - f""" - The list that sets the mapping between symlinks in the cycle - directory, and the files in the FIXam directory has been updated - to include the ozone production/loss file. 
- """, - verbose=verbose, - ) - - log_info( - f""" - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = {list_to_str(ozone_link_mappings)} - """, - verbose=verbose, - dedent_=False, - ) - # # ----------------------------------------------------------------------- # @@ -1354,6 +1316,8 @@ def dict_find(user_dict, substring): (run_make_ics or run_make_lbcs), } + fixed_files = expt_config["fixed_files"] + prep_tasks = ["GRID", "OROG", "SFC_CLIMO"] res_in_fixlam_filenames = None for prep_task in prep_tasks: From d1401ec181793559530a9168dceb436c3ad75422 Mon Sep 17 00:00:00 2001 From: "Chan-Hoo.Jeon-NOAA" <60152248+chan-hoo@users.noreply.github.com> Date: Thu, 4 Apr 2024 08:57:31 -0400 Subject: [PATCH 13/42] [develop] Fix failure on warm start option of SRW-AQM (#1065) * Fix failure on the warm start option of SRW-AQM. * Change the sample script config.aqm.yaml for running a warm start. * Change cpreq to cp because it does not work correctly on other machines except for WCOSS2. * Add missing exclusion to .gitignore. --- .gitignore | 1 + parm/wflow/coldstart.yaml | 30 +++---- scripts/exregional_make_ics.sh | 8 +- scripts/exregional_make_lbcs.sh | 2 +- scripts/exsrw_aqm_ics.sh | 78 ++++++++++++------- scripts/exsrw_aqm_lbcs.sh | 8 +- scripts/exsrw_fire_emission.sh | 14 ++-- scripts/exsrw_nexus_emission.sh | 6 +- scripts/exsrw_nexus_post_split.sh | 8 +- ...fig.aqm.community.yaml => config.aqm.yaml} | 10 ++- 10 files changed, 91 insertions(+), 74 deletions(-) rename ush/{config.aqm.community.yaml => config.aqm.yaml} (84%) diff --git a/.gitignore b/.gitignore index 2b362272f6..ed78ca4182 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,7 @@ lib/ parm/aqm_utils_parm/ parm/nexus_config/ parm/ufs_utils_parm/ +parm/upp_parm/ share/ sorc/*/ tests/WE2E/WE2E_tests_*.yaml diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml index 002d7f7b96..ceefe865e6 100644 --- a/parm/wflow/coldstart.yaml +++ b/parm/wflow/coldstart.yaml @@ -186,29 +186,21 @@ metatask_run_ensemble: attrs: task: point_source or_aqm_ics: - and_no_aqm_ics: - not: - taskvalid: - attrs: - task: aqm_ics_ext - not: - taskvalid: - attrs: - task: aqm_ics - and_aqm_atstart: - taskvalid: - attrs: - task: aqm_ics_ext - taskdep: - attrs: - task: aqm_ics_ext - and_aqm_cycled: + not: taskvalid: attrs: task: aqm_ics - taskdep: + taskdep: + attrs: + task: aqm_ics + or_aqm_ics_ext: + not: + taskvalid: attrs: - task: aqm_ics + task: aqm_ics_ext + taskdep: + attrs: + task: aqm_ics_ext or_aqm_lbcs: not: taskvalid: diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index 0fd6b0884d..84d73696eb 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -650,10 +650,10 @@ if [ "${CPL_AQM}" = "TRUE" ]; then else data_trans_path="${DATA_SHARE}" fi - cpreq -p out.atm.tile${TILE_RGNL}.nc "${data_trans_path}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" - cpreq -p out.sfc.tile${TILE_RGNL}.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" - cpreq -p gfs_ctrl.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" - cpreq -p gfs.bndy.nc "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc" + cp -p out.atm.tile${TILE_RGNL}.nc "${data_trans_path}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" + cp -p out.sfc.tile${TILE_RGNL}.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" + cp -p gfs_ctrl.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" + cp -p gfs.bndy.nc 
"${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc" else mv_vrfy out.atm.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc mv_vrfy out.sfc.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index 3a7f586051..ca3f6401cb 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -560,7 +560,7 @@ located in the following directory: fcst_hhh=$(( ${lbc_spec_fhrs} - ${EXTRN_MDL_LBCS_OFFSET_HRS} )) fcst_hhh_FV3LAM=$( printf "%03d" "$fcst_hhh" ) if [ "${CPL_AQM}" = "TRUE" ]; then - cpreq -p gfs.bndy.nc ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc + cp -p gfs.bndy.nc ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc else mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc fi diff --git a/scripts/exsrw_aqm_ics.sh b/scripts/exsrw_aqm_ics.sh index 9104374705..efd833b092 100755 --- a/scripts/exsrw_aqm_ics.sh +++ b/scripts/exsrw_aqm_ics.sh @@ -55,31 +55,54 @@ tial or boundary condition files for the FV3 will be generated. # rst_dir="${PREV_CYCLE_DIR}/RESTART" rst_file="fv_tracer.res.tile1.nc" -fv_tracer_file="${rst_dir}/${PDY}.${cyc}0000.${rst_file}" -print_info_msg "Looking for tracer restart file: \"${fv_tracer_file}\"" -if [ ! -r ${fv_tracer_file} ]; then - if [ -r ${rst_dir}/coupler.res ]; then - rst_info=( $( tail -n 1 ${rst_dir}/coupler.res ) ) - # Remove leading zeros from ${rst_info[1]} - month="${rst_info[1]#"${rst_info[1]%%[!0]*}"}" - # Remove leading zeros from ${rst_info[2]} - day="${rst_info[2]#"${rst_info[2]%%[!0]*}"}" - # Format the date without leading zeros - rst_date=$(printf "%04d%02d%02d%02d" ${rst_info[0]} $((10#$month)) $((10#$day)) ${rst_info[3]}) - print_info_msg " - Tracer file not found. Checking available restart date: - requested date: \"${PDY}${cyc}\" - available date: \"${rst_date}\"" - if [ "${rst_date}" = "${PDY}${cyc}" ] ; then - fv_tracer_file="${rst_dir}/${rst_file}" - if [ -r ${fv_tracer_file} ]; then - print_info_msg "Tracer file found: \"${fv_tracer_file}\"" - else - message_txt="FATAL ERROR No suitable tracer restart file ${rst_dir}/${rst_file} found." 
- err_exit "${message_txt}" - print_err_msg_exit "${message_txt}" - fi +rst_file_with_date="${PDY}.${cyc}0000.${rst_file}" +if [ -e "${rst_dir}/${rst_file_with_date}" ]; then + fv_tracer_file="${rst_dir}/${rst_file_with_date}" +elif [ -e "${rst_dir}/${rst_file}" ]; then + fv_tracer_file="${rst_dir}/${rst_file}" +else + message_txt="Tracer restart file: \"${fv_tracer_file}\" is NOT found" + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" +fi +print_info_msg "Tracer restart file: \"${fv_tracer_file}\"" + +cplr_file="coupler.res" +cplr_file_with_date="${PDY}.${cyc}0000.${cplr_file}" +if [ -e "${rst_dir}/${cplr_file_with_date}" ]; then + coupler_file="${rst_dir}/${cplr_file_with_date}" +elif [ -e "${rst_dir}/${cplr_file}" ]; then + coupler_file="${rst_dir}/${cplr_file}" +else + message_txt="Coupler file: \"${coupler_file}\" is NOT found" + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" +fi +print_info_msg "Coupler file: \"${coupler_file}\"" + +if [ -r ${coupler_file} ]; then + rst_info=( $( tail -n 1 ${coupler_file} ) ) + # Remove leading zeros from ${rst_info[1]} + month="${rst_info[1]#"${rst_info[1]%%[!0]*}"}" + # Remove leading zeros from ${rst_info[2]} + day="${rst_info[2]#"${rst_info[2]%%[!0]*}"}" + # Format the date without leading zeros + rst_date=$(printf "%04d%02d%02d%02d" ${rst_info[0]} $((10#$month)) $((10#$day)) ${rst_info[3]}) + if [ "${rst_date}" = "${PDY}${cyc}" ]; then + if [ -r ${fv_tracer_file} ]; then + print_info_msg "Tracer restart file is for ${PDY}${cyc}" + else + message_txt="Tracer restart file \"${fv_tracer_file}\" is NOT readable." + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi + else + message_txt="Tracer restart file is NOT for ${PDY}${cyc}. +Checking available restart date: + requested date: \"${PDY}${cyc}\" + available date: \"${rst_date}\"" + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi fi # @@ -103,7 +126,7 @@ print_info_msg " tracer file: \"${fv_tracer_file}\" FV3 IC file: \"${gfs_ic_fp}\"" -cpreq ${gfs_ic_fp} ${wrk_ic_fp} +cp -p ${gfs_ic_fp} ${wrk_ic_fp} ${USHsrw}/aqm_utils_python/add_aqm_ics.py --fv_tracer_file "${fv_tracer_file}" --wrk_ic_file "${wrk_ic_fp}" export err=$? if [ $err -ne 0 ]; then @@ -122,10 +145,7 @@ fi mv tmp1.nc ${gfs_ic_fn} -cpreq -p ${gfs_ic_fn} ${COMOUT} -cpreq -p "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" ${COMOUT} -cpreq -p "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" ${COMOUT} -cpreq -p "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc" ${COMOUT} +cp -p ${gfs_ic_fn} ${COMOUT} unset fv_tracer_file unset wrk_ic_file diff --git a/scripts/exsrw_aqm_lbcs.sh b/scripts/exsrw_aqm_lbcs.sh index f6d932962e..93dc119ec2 100755 --- a/scripts/exsrw_aqm_lbcs.sh +++ b/scripts/exsrw_aqm_lbcs.sh @@ -97,7 +97,7 @@ aqm_lbcs_fn_prefix="${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f" for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do fhr=$( printf "%03d" "${hr}" ) aqm_lbcs_fn="${aqm_lbcs_fn_prefix}${fhr}.nc" - cpreq "${DATA_SHARE}/${aqm_lbcs_fn}" ${DATA} + cp -p "${DATA_SHARE}/${aqm_lbcs_fn}" ${DATA} done if [ "${DO_AQM_CHEM_LBCS}" = "TRUE" ]; then @@ -106,7 +106,7 @@ if [ "${DO_AQM_CHEM_LBCS}" = "TRUE" ]; then chem_lbcs_fp="${FIXaqm}/chemlbc/${chem_lbcs_fn}" if [ -f ${chem_lbcs_fp} ]; then #Copy the boundary condition file to the current location - cpreq ${chem_lbcs_fp} . + cp -p ${chem_lbcs_fp} . 
else message_txt="The chemical LBC files do not exist: CHEM_BOUNDARY_CONDITION_FILE = \"${chem_lbcs_fp}\"" @@ -125,7 +125,7 @@ if [ "${DO_AQM_CHEM_LBCS}" = "TRUE" ]; then err_exit "${message_txt}" print_err_msg_exit "${message_txt}" fi - cpreq ${aqm_lbcs_fn} "${aqm_lbcs_fn}_chemlbc" + cp -p ${aqm_lbcs_fn} "${aqm_lbcs_fn}_chemlbc" fi done @@ -229,7 +229,7 @@ fi for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do fhr=$( printf "%03d" "${hr}" ) aqm_lbcs_fn="${aqm_lbcs_fn_prefix}${fhr}.nc" - cpreq -p "${DATA}/${aqm_lbcs_fn}" ${COMOUT} + cp -p "${DATA}/${aqm_lbcs_fn}" ${COMOUT} done # print_info_msg " diff --git a/scripts/exsrw_fire_emission.sh b/scripts/exsrw_fire_emission.sh index 68178016e7..cb44c99d8d 100755 --- a/scripts/exsrw_fire_emission.sh +++ b/scripts/exsrw_fire_emission.sh @@ -70,7 +70,7 @@ aqm_fire_file_fn="${AQM_FIRE_FILE_PREFIX}_${YYYYMMDD}_t${HH}z${AQM_FIRE_FILE_SUF # Check if the fire file exists in the designated directory if [ -e "${COMINfire}/${aqm_fire_file_fn}" ]; then - cpreq "${COMINfire}/${aqm_fire_file_fn}" ${COMOUT} + cp -p "${COMINfire}/${aqm_fire_file_fn}" ${COMOUT} else # Copy raw data for ihr in {0..23}; do @@ -83,16 +83,16 @@ else yyyymmdd_dn_md1="${missing_download_time:0:8}" FILE_13km_md1="RAVE-HrlyEmiss-13km_v*_blend_s${missing_download_time}00000_e${missing_download_time}59590_c*.nc" if [ -s `ls ${COMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}` ] && [ $(stat -c %s `ls ${COMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}`) -gt 4000000 ]; then - cpreq -p ${COMINfire}/${yyyymmdd_dn}/rave/${FILE_13km} ${FILE_curr} + cp -p ${COMINfire}/${yyyymmdd_dn}/rave/${FILE_13km} ${FILE_curr} elif [ -s `ls ${COMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1}` ] && [ $(stat -c %s `ls ${COMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1}`) -gt 4000000 ]; then echo "WARNING: ${FILE_13km} does not exist or broken. Replacing with the file of previous date ..." - cpreq -p ${COMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1} ${FILE_curr} + cp -p ${COMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1} ${FILE_curr} else message_txt="WARNING Fire Emission RAW data does not exist or broken: FILE_13km_md1 = \"${FILE_13km_md1}\" DCOMINfire = \"${DCOMINfire}\"" - cpreq -p ${FIXaqm}/fire/Hourly_Emissions_13km_dummy.nc ${FILE_curr} + cp -p ${FIXaqm}/fire/Hourly_Emissions_13km_dummy.nc ${FILE_curr} print_info_msg "WARNING: ${message_txt}. Replacing with the dummy file :: AQM RUN SOFT FAILED." fi done @@ -134,8 +134,8 @@ else print_err_msg_exit "${message_txt}" fi - cpreq Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_1.nc - cpreq Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_2.nc + cp -p Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_1.nc + cp -p Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_2.nc ncrcat -O -D 2 Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_1.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_2.nc ${aqm_fire_file_fn} export err=$? 
@@ -155,7 +155,7 @@ else mv temp6.nc ${aqm_fire_file_fn} # Copy the final fire emission file to data share directory - cpreq "${DATA}/${aqm_fire_file_fn}" ${COMOUT} + cp -p "${DATA}/${aqm_fire_file_fn}" ${COMOUT} fi # #----------------------------------------------------------------------- diff --git a/scripts/exsrw_nexus_emission.sh b/scripts/exsrw_nexus_emission.sh index 7edd18ce42..a5769a6483 100755 --- a/scripts/exsrw_nexus_emission.sh +++ b/scripts/exsrw_nexus_emission.sh @@ -103,12 +103,12 @@ fi # #----------------------------------------------------------------------- # -cpreq ${FIXaqm}/nexus/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc +cp -p ${FIXaqm}/nexus/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc if [ "${USE_GFS_SFC}" = "TRUE" ]; then - cpreq ${PARMsrw}/nexus_config/cmaq_gfs_megan/*.rc ${DATA} + cp -p ${PARMsrw}/nexus_config/cmaq_gfs_megan/*.rc ${DATA} else - cpreq ${PARMsrw}/nexus_config/cmaq/*.rc ${DATA} + cp -p ${PARMsrw}/nexus_config/cmaq/*.rc ${DATA} fi # #----------------------------------------------------------------------- diff --git a/scripts/exsrw_nexus_post_split.sh b/scripts/exsrw_nexus_post_split.sh index 3b83dee523..517893b5e5 100755 --- a/scripts/exsrw_nexus_post_split.sh +++ b/scripts/exsrw_nexus_post_split.sh @@ -74,12 +74,12 @@ end_date=`$NDATE +${FCST_LEN_HRS} ${YYYYMMDD}${HH}` # #----------------------------------------------------------------------- # -cpreq ${PARMsrw}/nexus_config/cmaq/HEMCO_sa_Time.rc ${DATA}/HEMCO_sa_Time.rc -cpreq ${FIXaqm}/nexus/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc +cp -p ${PARMsrw}/nexus_config/cmaq/HEMCO_sa_Time.rc ${DATA}/HEMCO_sa_Time.rc +cp -p ${FIXaqm}/nexus/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc if [ "${NUM_SPLIT_NEXUS}" = "01" ]; then nspt="00" - cpreq ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc ${DATA}/NEXUS_Expt_combined.nc + cp -p ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc ${DATA}/NEXUS_Expt_combined.nc else ${USHsrw}/nexus_utils/python/concatenate_nexus_post_split.py "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.*.nc" "${DATA}/NEXUS_Expt_combined.nc" export err=$? @@ -110,7 +110,7 @@ fi # #----------------------------------------------------------------------- # -mv ${DATA}/NEXUS_Expt.nc ${COMOUT}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc +cp -p ${DATA}/NEXUS_Expt.nc ${COMOUT}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc # # Print message indicating successful completion of script. 
# diff --git a/ush/config.aqm.community.yaml b/ush/config.aqm.yaml similarity index 84% rename from ush/config.aqm.community.yaml rename to ush/config.aqm.yaml index 2f32d0eac5..2718eafbbf 100644 --- a/ush/config.aqm.community.yaml +++ b/ush/config.aqm.yaml @@ -1,5 +1,5 @@ metadata: - description: config for Online-CMAQ, AQM_NA_13km, community mode + description: config for Online-CMAQ, AQM_NA_13km, warm-start user: RUN_ENVIR: community MACHINE: hera @@ -7,7 +7,7 @@ user: workflow: USE_CRON_TO_RELAUNCH: true CRON_RELAUNCH_INTVL_MNTS: 3 - EXPT_SUBDIR: aqm_community_aqmna13 + EXPT_SUBDIR: aqm_AQMNA13km_warmstart PREDEF_GRID_NAME: AQM_NA_13km CCPP_PHYS_SUITE: FV3_GFS_v16 DATE_FIRST_CYCL: '2023111000' @@ -21,12 +21,16 @@ workflow: DIAG_TABLE_TMPL_FN: diag_table_aqm.FV3_GFS_v16 FIELD_TABLE_TMPL_FN: field_table_aqm.FV3_GFS_v16 DO_REAL_TIME: false + COLDSTART: false # set to true for cold start + WARMSTART_CYCLE_DIR: '/scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/restart/2023111000' nco: + envir_default: test_aqm_warmstart NET_default: aqm + RUN_default: aqm rocoto: tasks: taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' - task_aqm_ics_ext: +# task_aqm_ics_ext: # uncomment this in case of COLDSTART: true metatask_run_ensemble: task_run_fcst_mem#mem#: walltime: 01:00:00 From bf7b49058709b7bae3c02d49300127203b5266ff Mon Sep 17 00:00:00 2001 From: "Chan-Hoo.Jeon-NOAA" <60152248+chan-hoo@users.noreply.github.com> Date: Mon, 8 Apr 2024 09:01:43 -0400 Subject: [PATCH 14/42] [develop] Port SRW-AQM to Orion and Hercules (#1067) * Port SRW-AQM to Orion and Hercules --- modulefiles/build_derecho_intel.lua | 2 ++ modulefiles/build_hercules_intel.lua | 1 + modulefiles/build_orion_intel.lua | 1 + modulefiles/tasks/derecho/nexus_post_split.local.lua | 3 ++- modulefiles/tasks/derecho/python_srw.lua | 3 --- modulefiles/tasks/hercules/aqm_ics.local.lua | 1 - modulefiles/tasks/hercules/aqm_lbcs.local.lua | 3 +-- modulefiles/tasks/hercules/fire_emission.local.lua | 1 - modulefiles/tasks/hercules/nexus_emission.local.lua | 1 - modulefiles/tasks/hercules/nexus_post_split.local.lua | 1 - modulefiles/tasks/hercules/python_srw.lua | 3 --- modulefiles/tasks/orion/aqm_ics.local.lua | 1 - modulefiles/tasks/orion/aqm_lbcs.local.lua | 3 +-- modulefiles/tasks/orion/fire_emission.local.lua | 1 - modulefiles/tasks/orion/nexus_emission.local.lua | 1 - modulefiles/tasks/orion/nexus_post_split.local.lua | 1 - modulefiles/tasks/orion/python_srw.lua | 3 --- parm/wflow/aqm_prep.yaml | 2 ++ .../aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml | 2 +- ush/config.aqm.yaml | 10 ++++++---- ush/machine/derecho.yaml | 2 +- ush/machine/hercules.yaml | 9 ++++++++- ush/machine/orion.yaml | 9 ++++++++- 23 files changed, 34 insertions(+), 30 deletions(-) delete mode 100644 modulefiles/tasks/derecho/python_srw.lua delete mode 100644 modulefiles/tasks/hercules/python_srw.lua delete mode 100644 modulefiles/tasks/orion/python_srw.lua diff --git a/modulefiles/build_derecho_intel.lua b/modulefiles/build_derecho_intel.lua index ac98c39e53..e057c9e5dc 100644 --- a/modulefiles/build_derecho_intel.lua +++ b/modulefiles/build_derecho_intel.lua @@ -14,5 +14,7 @@ load(pathJoin("cmake", os.getenv("cmake_ver") or "3.26.3")) load("srw_common") +load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2")) + setenv("CMAKE_Platform","derecho.intel") diff --git a/modulefiles/build_hercules_intel.lua b/modulefiles/build_hercules_intel.lua index cec2a3a30e..531f48a080 100644 --- 
a/modulefiles/build_hercules_intel.lua +++ b/modulefiles/build_hercules_intel.lua @@ -17,6 +17,7 @@ load("srw_common") load("nccmp/1.9.0.1") load("nco/5.0.6") +load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2")) setenv("CFLAGS","-diag-disable=10441") setenv("FFLAGS","-diag-disable=10441") diff --git a/modulefiles/build_orion_intel.lua b/modulefiles/build_orion_intel.lua index d3e777d1dc..8e895c5bee 100644 --- a/modulefiles/build_orion_intel.lua +++ b/modulefiles/build_orion_intel.lua @@ -18,6 +18,7 @@ load("srw_common") load("nccmp/1.9.0.1") load("nco/5.0.6") load("wget") +load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2")) setenv("CMAKE_C_COMPILER","mpiicc") setenv("CMAKE_CXX_COMPILER","mpiicpc") diff --git a/modulefiles/tasks/derecho/nexus_post_split.local.lua b/modulefiles/tasks/derecho/nexus_post_split.local.lua index 07d126ff0b..e7f216375c 100644 --- a/modulefiles/tasks/derecho/nexus_post_split.local.lua +++ b/modulefiles/tasks/derecho/nexus_post_split.local.lua @@ -1,3 +1,4 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6")) +load("nco/5.0.6") + load("ncarenv") load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/python_srw.lua b/modulefiles/tasks/derecho/python_srw.lua deleted file mode 100644 index fe6c73a7d5..0000000000 --- a/modulefiles/tasks/derecho/python_srw.lua +++ /dev/null @@ -1,3 +0,0 @@ -unload("python") -load("conda") -setenv("SRW_ENV", "srw_app") diff --git a/modulefiles/tasks/hercules/aqm_ics.local.lua b/modulefiles/tasks/hercules/aqm_ics.local.lua index 2aac950d8d..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/aqm_ics.local.lua +++ b/modulefiles/tasks/hercules/aqm_ics.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/aqm_lbcs.local.lua b/modulefiles/tasks/hercules/aqm_lbcs.local.lua index 5a7b0cece6..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/aqm_lbcs.local.lua +++ b/modulefiles/tasks/hercules/aqm_lbcs.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("miniconda_regional_workflow_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/fire_emission.local.lua b/modulefiles/tasks/hercules/fire_emission.local.lua index 2aac950d8d..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/fire_emission.local.lua +++ b/modulefiles/tasks/hercules/fire_emission.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/nexus_emission.local.lua b/modulefiles/tasks/hercules/nexus_emission.local.lua index 2aac950d8d..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/nexus_emission.local.lua +++ b/modulefiles/tasks/hercules/nexus_emission.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/nexus_post_split.local.lua b/modulefiles/tasks/hercules/nexus_post_split.local.lua index 2aac950d8d..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/nexus_post_split.local.lua +++ b/modulefiles/tasks/hercules/nexus_post_split.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/python_srw.lua b/modulefiles/tasks/hercules/python_srw.lua deleted file mode 100644 index fe6c73a7d5..0000000000 --- a/modulefiles/tasks/hercules/python_srw.lua +++ /dev/null @@ -1,3 +0,0 @@ -unload("python") -load("conda") -setenv("SRW_ENV", "srw_app") diff --git 
a/modulefiles/tasks/orion/aqm_ics.local.lua b/modulefiles/tasks/orion/aqm_ics.local.lua index 2aac950d8d..df0e35d5da 100644 --- a/modulefiles/tasks/orion/aqm_ics.local.lua +++ b/modulefiles/tasks/orion/aqm_ics.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/aqm_lbcs.local.lua b/modulefiles/tasks/orion/aqm_lbcs.local.lua index 5a7b0cece6..df0e35d5da 100644 --- a/modulefiles/tasks/orion/aqm_lbcs.local.lua +++ b/modulefiles/tasks/orion/aqm_lbcs.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("miniconda_regional_workflow_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/fire_emission.local.lua b/modulefiles/tasks/orion/fire_emission.local.lua index 2aac950d8d..df0e35d5da 100644 --- a/modulefiles/tasks/orion/fire_emission.local.lua +++ b/modulefiles/tasks/orion/fire_emission.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/nexus_emission.local.lua b/modulefiles/tasks/orion/nexus_emission.local.lua index 2aac950d8d..df0e35d5da 100644 --- a/modulefiles/tasks/orion/nexus_emission.local.lua +++ b/modulefiles/tasks/orion/nexus_emission.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/nexus_post_split.local.lua b/modulefiles/tasks/orion/nexus_post_split.local.lua index 2aac950d8d..df0e35d5da 100644 --- a/modulefiles/tasks/orion/nexus_post_split.local.lua +++ b/modulefiles/tasks/orion/nexus_post_split.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/python_srw.lua b/modulefiles/tasks/orion/python_srw.lua deleted file mode 100644 index fe6c73a7d5..0000000000 --- a/modulefiles/tasks/orion/python_srw.lua +++ /dev/null @@ -1,3 +0,0 @@ -unload("python") -load("conda") -setenv("SRW_ENV", "srw_app") diff --git a/parm/wflow/aqm_prep.yaml b/parm/wflow/aqm_prep.yaml index d8f01d2c82..c57d2198f0 100644 --- a/parm/wflow/aqm_prep.yaml +++ b/parm/wflow/aqm_prep.yaml @@ -106,6 +106,7 @@ task_aqm_ics_ext: <<: *default_vars PREV_CYCLE_DIR: '&WARMSTART_CYCLE_DIR;' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + memory: 64G dependency: and: taskdep: @@ -131,6 +132,7 @@ task_aqm_ics: <<: *default_vars PREV_CYCLE_DIR: '&COMIN_DIR;' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + memory: 64G dependency: and: taskdep: diff --git a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml index 1587fadcc1..2901d1ebf1 100644 --- a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml +++ b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml @@ -21,7 +21,7 @@ rocoto: task_aqm_ics_ext: metatask_run_ensemble: task_run_fcst_mem#mem#: - walltime: 01:00:00 + walltime: 01:20:00 task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: netcdf diff --git a/ush/config.aqm.yaml b/ush/config.aqm.yaml index 2718eafbbf..155f846add 100644 --- a/ush/config.aqm.yaml +++ b/ush/config.aqm.yaml @@ -1,8 +1,8 @@ metadata: - description: config for Online-CMAQ, AQM_NA_13km, warm-start + description: config for SRW-AQM, AQM_NA_13km, warm-start user: RUN_ENVIR: community - MACHINE: hera + MACHINE: [hera/orion/hercules/derecho] ACCOUNT: [account name] workflow: 
USE_CRON_TO_RELAUNCH: true @@ -22,7 +22,9 @@ workflow: FIELD_TABLE_TMPL_FN: field_table_aqm.FV3_GFS_v16 DO_REAL_TIME: false COLDSTART: false # set to true for cold start - WARMSTART_CYCLE_DIR: '/scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/restart/2023111000' + WARMSTART_CYCLE_DIR: '/scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/restart/2023111000' # for hera +# WARMSTART_CYCLE_DIR: '/work/noaa/epic/SRW-AQM_DATA/aqm_data/restart/2023111000' # for orion/hercules +# WARMSTART_CYCLE_DIR: '' # for derecho nco: envir_default: test_aqm_warmstart NET_default: aqm @@ -33,7 +35,7 @@ rocoto: # task_aqm_ics_ext: # uncomment this in case of COLDSTART: true metatask_run_ensemble: task_run_fcst_mem#mem#: - walltime: 01:00:00 + walltime: 01:20:00 task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: netcdf diff --git a/ush/machine/derecho.yaml b/ush/machine/derecho.yaml index 511ccc2784..b12e65513c 100644 --- a/ush/machine/derecho.yaml +++ b/ush/machine/derecho.yaml @@ -16,7 +16,7 @@ platform: RUN_CMD_SERIAL: time RUN_CMD_UTILS: mpiexec -n $nprocs RUN_CMD_NEXUS: mpiexec -n $nprocs - RUN_CMD_AQMLBC: mpiexec -n ${NUMTS} + RUN_CMD_AQMLBC: mpiexec -n ${numts} PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' TEST_EXTRN_MDL_SOURCE_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data TEST_AQM_INPUT_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/aqm_data diff --git a/ush/machine/hercules.yaml b/ush/machine/hercules.yaml index e300cf3d6d..e29801dd49 100644 --- a/ush/machine/hercules.yaml +++ b/ush/machine/hercules.yaml @@ -19,7 +19,7 @@ platform: RUN_CMD_SERIAL: time RUN_CMD_UTILS: srun --export=ALL -n $nprocs RUN_CMD_NEXUS: srun --export=ALL - RUN_CMD_AQMLBC: srun --export=ALL -n ${NUMTS} + RUN_CMD_AQMLBC: srun --export=ALL -n ${numts} SCHED_NATIVE_CMD: --export=NONE PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' TEST_EXTRN_MDL_SOURCE_BASEDIR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data @@ -33,6 +33,8 @@ platform: FIXorg: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/fix/fix_orog FIXsfc: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/fix/fix_sfc_climo FIXshp: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/NaturalEarth + FIXaqm: /work/noaa/epic/SRW-AQM_DATA/fix_aqm + FIXemis: /work/noaa/epic/SRW-AQM_DATA/fix_emis EXTRN_MDL_DATA_STORES: aws data: ics_lbcs: @@ -44,3 +46,8 @@ data: HRRR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh} RAP: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} GSMGFS: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh} + +cpl_aqm_parm: + COMINfire_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/RAVE_fire + COMINgefs_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GEFS_DATA + NEXUS_GFS_SFC_DIR: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA diff --git a/ush/machine/orion.yaml b/ush/machine/orion.yaml index 3f74905c8f..3f756e2836 100644 --- a/ush/machine/orion.yaml +++ b/ush/machine/orion.yaml @@ -19,7 +19,7 @@ platform: RUN_CMD_SERIAL: time RUN_CMD_UTILS: srun --export=ALL RUN_CMD_NEXUS: srun --export=ALL - RUN_CMD_AQMLBC: srun --export=ALL -n ${NUMTS} + RUN_CMD_AQMLBC: srun --export=ALL -n ${numts} SCHED_NATIVE_CMD: --export=NONE PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' TEST_EXTRN_MDL_SOURCE_BASEDIR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data @@ -32,6 +32,8 @@ platform: FIXorg: 
/work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/fix/fix_orog FIXsfc: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/fix/fix_sfc_climo FIXshp: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/NaturalEarth + FIXaqm: /work/noaa/epic/SRW-AQM_DATA/fix_aqm + FIXemis: /work/noaa/epic/SRW-AQM_DATA/fix_emis EXTRN_MDL_DATA_STORES: aws nomads data: ics_lbcs: @@ -43,3 +45,8 @@ data: HRRR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh} RAP: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} GSMGFS: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh} + +cpl_aqm_parm: + COMINfire_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/RAVE_fire + COMINgefs_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GEFS_DATA + NEXUS_GFS_SFC_DIR: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA From aa1678b490e34a4bb0cd87960fa968d2eb082d25 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 12 Apr 2024 12:30:02 -0400 Subject: [PATCH 15/42] Bump idna from 3.6 to 3.7 in /doc (#1071) Bumps [idna](https://github.com/kjd/idna) from 3.6 to 3.7. - [Release notes](https://github.com/kjd/idna/releases) - [Changelog](https://github.com/kjd/idna/blob/master/HISTORY.rst) - [Commits](https://github.com/kjd/idna/compare/v3.6...v3.7) --- updated-dependencies: - dependency-name: idna dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- doc/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/requirements.txt b/doc/requirements.txt index 0671225d72..eadc94dcaf 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -18,7 +18,7 @@ docutils==0.20.1 # sphinx # sphinx-rtd-theme # sphinxcontrib-bibtex -idna==3.6 +idna==3.7 # via requests imagesize==1.4.1 # via sphinx From a609196be0d84cb4775d79373275717e389dde77 Mon Sep 17 00:00:00 2001 From: Bruce Kropp - Raytheon <104453151+BruceKropp-Raytheon@users.noreply.github.com> Date: Mon, 15 Apr 2024 08:42:57 -0700 Subject: [PATCH 16/42] [develop] Feature/cicd metrics adds methods to collect resource usage data from major stages of the SRW pipeline build job (#1058) Updates the SRW Jenkinsfile with run-time stats collection and adds a final stage that triggers the ufs-srw-metrics stats-collection job for reporting metrics. The SRW pipeline job that uses this Jenkinsfile now runs the 'time' command when executing the major stages: init, build, and test. This collects CPU, Memory, and DiskUsage measurements that can later be used in trend plots on a metrics dashboard. Additionally, it adds options to the pipeline job that allow the operator to select just a single test or no test suite (the default is still the 'coverage' suite), and an option to select the depth of wrapper script tasks to execute during functional testing (the default is still all 9 scripts).
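For reference, a minimal sketch of the timing pattern the CI scripts in this patch wrap around each stage (the stage command and output filename below are illustrative placeholders; the GNU time format fields are a subset of those used in the actual scripts):

/usr/bin/time -p \
  -f '{"cpu": "%P", "memMax": "%M", "time": {"real": "%e", "user": "%U", "sys": "%S"}}' \
  -o "${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-example.json" \
  ./build.sh "${platform}" "${SRW_COMPILER}"  # placeholder stage command

The wrapped command's stdout/stderr pass through unchanged; only GNU time's resource summary is written to the JSON file named by -o, which the Jenkinsfile then uploads as a build artifact.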
--- .cicd/Jenkinsfile | 60 +++++++++++++++++++++++++----- .cicd/scripts/disk_usage.sh | 48 ++++++++++++++++++++++++ .cicd/scripts/srw_build.sh | 3 +- .cicd/scripts/srw_init.sh | 38 +++++++++++++++++++ .cicd/scripts/srw_test.sh | 12 +++++- .cicd/scripts/wrapper_srw_ftest.sh | 4 ++ tests/WE2E/setup_WE2E_tests.sh | 1 + 7 files changed, 153 insertions(+), 13 deletions(-) create mode 100755 .cicd/scripts/disk_usage.sh create mode 100755 .cicd/scripts/srw_init.sh diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 8cc95c6b00..ea87029408 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -16,6 +16,10 @@ pipeline { choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'derecho', 'gaea', 'hera', 'jet', 'orion', 'hercules'], description: 'Specify the platform(s) to use') // Allow job runner to filter based on compiler choice(name: 'SRW_COMPILER_FILTER', choices: ['all', 'gnu', 'intel'], description: 'Specify the compiler(s) to use to build') + // Workflow Wrapper test depth {0..9}, 0=none, 1=simple, 9=all [default] + choice(name: 'SRW_WRAPPER_TASK_DEPTH', choices: ['9', '1', '0'], description: '0=none, 1=simple, 9=all [default]') + // WE2E Tests ? + choice(name: 'SRW_WE2E_SINGLE_TEST', choices: ['coverage', 'none', 'skill-score', 'grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0'], description: 'Specify the WE2E test to use') booleanParam name: 'SRW_WE2E_COMPREHENSIVE_TESTS', defaultValue: false, description: 'Whether to execute the comprehensive end-to-end tests' } @@ -126,10 +130,17 @@ pipeline { stage('Initialize') { steps { dir ("${env.SRW_PLATFORM}") { - echo "Initializing SRW (${env.SRW_COMPILER}) build environment on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" + echo "${env.STAGE_NAME} SRW (${env.SRW_COMPILER}) build environment on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" cleanWs() checkout scm - sh '"${WORKSPACE}/${SRW_PLATFORM}/manage_externals/checkout_externals"' + sh '"${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_init.sh"' + sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"' + } + } + post { + always { + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } } @@ -138,8 +149,9 @@ pipeline { stage('Build') { steps { dir ("${env.SRW_PLATFORM}") { - echo "Building SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" + echo 
"${env.STAGE_NAME} SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" sh 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_build.sh"' + sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"' } } @@ -148,6 +160,11 @@ pipeline { sh 'cd "${WORKSPACE}/${SRW_PLATFORM}/${INSTALL_NAME}" && tar --create --gzip --verbose --file "${WORKSPACE}/${SRW_PLATFORM}/${BUILD_NAME}.tgz" *' s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: true, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/${env.BUILD_NAME}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: true, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/build_${env.SRW_COMPILER}/srw_build-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } + always { + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-env.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_build.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + } } } @@ -163,9 +180,12 @@ pipeline { // Try a few Workflow Task scripts to make sure E2E tests can be launched in a follow-on 'Test' stage stage('Functional WorkflowTaskTests') { + environment { + TASK_DEPTH = "${env.SRW_WRAPPER_TASK_DEPTH}" + } steps { dir ("${env.SRW_PLATFORM}") { - echo "Running simple workflow script task tests on 
${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" + echo "Running ${TASK_DEPTH} simple workflow script task tests on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" sh 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/wrapper_srw_ftest.sh"' } } @@ -179,11 +199,12 @@ pipeline { steps { dir ("${env.SRW_PLATFORM}") { - echo "Testing SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" + echo "${env.STAGE_NAME} SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" // If executing for a Pull Request, check for the run_we2e_comprehensive_tests. If set, // override the value of the SRW_WE2E_COMPREHENSIVE_TESTS parameter script { + def single_test = params.SRW_WE2E_SINGLE_TEST def run_we2e_comprehensive_tests = params.SRW_WE2E_COMPREHENSIVE_TESTS def run_we2e_comprehensive_tests_label = 'run_we2e_comprehensive_tests' @@ -195,18 +216,37 @@ pipeline { } } - sh "SRW_WE2E_COMPREHENSIVE_TESTS=${run_we2e_comprehensive_tests}" + ' bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_test.sh"' - } + sh "SRW_WE2E_COMPREHENSIVE_TESTS=${run_we2e_comprehensive_tests} SRW_WE2E_SINGLE_TEST=${single_test}" + ' bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_test.sh"' + + // Archive the test log files + sh "[[ -d ${SRW_WE2E_EXPERIMENT_BASE_DIR} ]] && cd ${SRW_WE2E_EXPERIMENT_BASE_DIR} && tar --create --gzip --verbose --dereference --file ${WORKSPACE}/${SRW_PLATFORM}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz */log.generate_FV3LAM_wflow */log/* ${WORKSPACE}/${SRW_PLATFORM}/tests/WE2E/WE2E_tests_*yaml WE2E_summary*txt ${WORKSPACE}/${SRW_PLATFORM}/tests/WE2E/log.* || cat /dev/null > ${WORKSPACE}/${SRW_PLATFORM}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz" + } + sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"' } } post { + success { + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + } always { - // Archive the test log files - sh 'cd "${SRW_WE2E_EXPERIMENT_BASE_DIR}" && tar --create --gzip --verbose --dereference --file "${WORKSPACE}/${SRW_PLATFORM}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz" */log.generate_FV3LAM_wflow */log/* ${WORKSPACE}/${SRW_PLATFORM}/tests/WE2E/WE2E_tests_*yaml WE2E_summary*txt ${WORKSPACE}/${SRW_PLATFORM}/tests/WE2E/log.*' + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: 
false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] // Remove the data sets from the experiments directory to conserve disk space sh 'find "${SRW_WE2E_EXPERIMENT_BASE_DIR}" -regextype posix-extended -regex "^.*(orog|[0-9]{10})$" -type d | xargs rm -rf' - s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + } + } + } + + stage('Metrics') { + steps { + script { + CI_BRANCH_NAME=env.JOB_BASE_NAME.replace("%2F","%252F") + echo "Triggering job for branch ${CI_BRANCH_NAME}/${env.BUILD_NUMBER} ..." + build job: '/ufs-srweather-app/ufs-srw-metrics', parameters: [ + string(name: 'CI_JOB_NAME', value: "ufs-srweather-app/metrics"), + string(name: 'CI_BUILD_NUMBER', value: "${CI_BRANCH_NAME}/${env.BUILD_NUMBER}") + ], wait: false } } } diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh new file mode 100755 index 0000000000..08a482d70f --- /dev/null +++ b/.cicd/scripts/disk_usage.sh @@ -0,0 +1,48 @@ +#!/usr/bin/env bash + +# Output a CSV report of disk usage on subdirs of some path +# Usage: +# [JOB_NAME=] [BUILD_NUMBER=] [SRW_COMPILER=] [SRW_PLATFORM=] disk_usage path depth size outfile.csv +# +# args: +# directory=$1 +# depth=$2 +# size=$3 +# outfile=$4 + +[[ -n ${WORKSPACE} ]] || WORKSPACE=$(pwd) +[[ -n ${SRW_PLATFORM} ]] || SRW_PLATFORM=$(hostname -s 2>/dev/null) || SRW_PLATFORM=$(hostname 2>/dev/null) +[[ -n ${SRW_COMPILER} ]] || SRW_COMPILER=compiler + +script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)" + +# Get repository root from Jenkins WORKSPACE variable if set, otherwise, set +# relative to script directory. +declare workspace +if [[ -n "${WORKSPACE}/${SRW_PLATFORM}" ]]; then + workspace="${WORKSPACE}/${SRW_PLATFORM}" +else + workspace="$(cd -- "${script_dir}/../.." 
&& pwd)" +fi + +echo "STAGE_NAME=${STAGE_NAME}" # from pipeline +outfile="${4:-${workspace}-${SRW_COMPILER}-disk-usage${STAGE_NAME}.csv}" + +function disk_usage() { + local directory=${1:-${PWD}} + local depth=${2:-1} + local size=${3:-k} + echo "Disk usage: ${JOB_NAME:-ci}/${SRW_PLATFORM}/$(basename $directory)" + ( + cd $directory || exit 1 + echo "Platform,Build,Owner,Group,Inodes,${size:-k}bytes,Access Time,Filename" + du -Px -d ${depth:-1} --inode --exclude='./workspace' | \ + while read line ; do + arr=($line); inode=${arr[0]}; filename=${arr[1]}; + echo "${SRW_PLATFORM}-${SRW_COMPILER:-compiler},${JOB_NAME:-ci}/${BUILD_NUMBER:-0},$(stat -c '%U,%G' $filename),${inode:-0},$(du -Px -s -${size:-k} --time $filename)" | tr '\t' ',' ; + done | sort -t, -k5 -n #-r + ) + echo "" +} + +disk_usage $1 $2 $3 | tee ${outfile} diff --git a/.cicd/scripts/srw_build.sh b/.cicd/scripts/srw_build.sh index 196d984a05..4733c4a4ca 100755 --- a/.cicd/scripts/srw_build.sh +++ b/.cicd/scripts/srw_build.sh @@ -27,7 +27,8 @@ fi # Build and install cd ${workspace}/tests set +e -./build.sh ${platform} ${SRW_COMPILER} +/usr/bin/time -p -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' -o ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_build.json \ + ./build.sh ${platform} ${SRW_COMPILER} build_exit=$? set -e cd - diff --git a/.cicd/scripts/srw_init.sh b/.cicd/scripts/srw_init.sh new file mode 100755 index 0000000000..688255ac98 --- /dev/null +++ b/.cicd/scripts/srw_init.sh @@ -0,0 +1,38 @@ +#!/usr/bin/env bash +# +# A unified init script for the SRW application. This script is expected to +# fetch initial source for the SRW application for all supported platforms. +# +set -e -u -x + +script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)" + +# Get repository root from Jenkins WORKSPACE variable if set, otherwise, set +# relative to script directory. +declare workspace +if [[ -n "${WORKSPACE}/${SRW_PLATFORM}" ]]; then + workspace="${WORKSPACE}/${SRW_PLATFORM}" +else + workspace="$(cd -- "${script_dir}/../.." && pwd)" +fi + +# Normalize Parallel Works cluster platform value. +declare platform +if [[ "${SRW_PLATFORM}" =~ ^(az|g|p)clusternoaa ]]; then + platform='noaacloud' +else + platform="${SRW_PLATFORM}" +fi + +# Build and install +cd ${workspace} +set +e +/usr/bin/time -p -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' -o ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_init.json \ + ./manage_externals/checkout_externals +init_exit=$? 
+echo "STAGE_NAME=${STAGE_NAME}" +env | grep = | sort > ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-env.txt +set -e +cd - + +exit $init_exit diff --git a/.cicd/scripts/srw_test.sh b/.cicd/scripts/srw_test.sh index 76ddf020df..49db0945a9 100755 --- a/.cicd/scripts/srw_test.sh +++ b/.cicd/scripts/srw_test.sh @@ -29,17 +29,25 @@ fi we2e_experiment_base_dir="${workspace}/expt_dirs" we2e_test_dir="${workspace}/tests/WE2E" +# Clean any stale test logs +rm -f ${workspace}/tests/WE2E/log.* +rm -f ${we2e_experiment_base_dir}/*/log.generate_FV3LAM_wflow ${we2e_experiment_base_dir}/*/log/* WE2E_summary*txt + # Run the end-to-end tests. if "${SRW_WE2E_COMPREHENSIVE_TESTS}"; then test_type="comprehensive" else - test_type="coverage" + test_type=${SRW_WE2E_SINGLE_TEST:-"coverage"} + if [[ "${test_type}" = skill-score ]]; then + test_type="grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0" + fi fi cd ${we2e_test_dir} # Progress file progress_file="${workspace}/we2e_test_results-${platform}-${SRW_COMPILER}.txt" -./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \ +/usr/bin/time -p -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' -o ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_test.json \ + ./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \ --expt_basedir=${we2e_experiment_base_dir} | tee ${progress_file} # Set exit code to number of failures diff --git a/.cicd/scripts/wrapper_srw_ftest.sh b/.cicd/scripts/wrapper_srw_ftest.sh index c6a4d19568..2ac31016e3 100755 --- a/.cicd/scripts/wrapper_srw_ftest.sh +++ b/.cicd/scripts/wrapper_srw_ftest.sh @@ -38,6 +38,10 @@ if [[ "${SRW_PLATFORM}" == jet ]]; then sed -i '15i #SBATCH --partition=xjet' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh fi +if [[ "${TASK_DEPTH}" == 0 ]] ; then + exit 0 +fi + # Call job card and return job_id echo "Running: ${workflow_cmd} -A ${SRW_PROJECT} ${arg_1} ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh" job_id=$(${workflow_cmd} -A ${SRW_PROJECT} ${arg_1} ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh) diff --git a/tests/WE2E/setup_WE2E_tests.sh b/tests/WE2E/setup_WE2E_tests.sh index 309c755966..8fa0977af7 100755 --- a/tests/WE2E/setup_WE2E_tests.sh +++ b/tests/WE2E/setup_WE2E_tests.sh @@ -80,6 +80,7 @@ export HOME=$homedir source ../../ush/load_modules_wflow.sh ${machine} # Run the E2E Workflow tests +[[ ${tests} = none ]] && echo "none" || \ ./run_WE2E_tests.py \ --machine=${machine} \ --account=${account} \ From c7e093d7133cb9368059039374f9f28245a2d9f2 Mon Sep 17 00:00:00 2001 From: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com> Date: Mon, 15 Apr 2024 12:17:41 -0400 Subject: [PATCH 17/42] [develop] Update weather model hash and correct behavior in Functional WorkflowTaskTests Jenkins stage (#1068) * The ufs-weather-model hash has been updated to 1411b90 (April 1, 2024). * Updated build_hera_gnu.lua file to allow it to work with updates to the ufs-weather-model. * Updated behavior of the Functional WorkflowTaskTests Jenkins stage to allow the test to properly finish, rather than waiting in queue for all jobs associated with the EPIC role account to finish first (modification to .cicd/scripts/wrapper_srw_ftest.sh). 
* Corrected the hang encountered while running the Functional WorkflowTaskTests stage on Gaea. * Applied Mike Kavulich's modification to ush/bash_utils/create_symlink_to_file.sh and converted calls to the create_symlink_to_file function from using named arguments to positional arguments (Issue #1066). --- .cicd/scripts/wrapper_srw_ftest.sh | 8 ++- Externals.cfg | 2 +- modulefiles/build_hera_gnu.lua | 8 +-- scripts/exregional_make_orog.sh | 12 ++-- scripts/exregional_run_fcst.sh | 66 +++++++----------- scripts/exregional_run_post.sh | 4 +- ush/bash_utils/create_symlink_to_file.sh | 85 +++--------------------- 7 files changed, 47 insertions(+), 138 deletions(-) diff --git a/.cicd/scripts/wrapper_srw_ftest.sh b/.cicd/scripts/wrapper_srw_ftest.sh index 2ac31016e3..950ceb7a34 100755 --- a/.cicd/scripts/wrapper_srw_ftest.sh +++ b/.cicd/scripts/wrapper_srw_ftest.sh @@ -15,17 +15,16 @@ declare arg_1 if [[ "${SRW_PLATFORM}" == cheyenne ]] || [[ "${SRW_PLATFORM}" == derecho ]]; then workflow_cmd=qsub arg_1="" - check_job="qstat -u ${USER} -r ${job_id}" else workflow_cmd=sbatch arg_1="--parsable" - check_job="squeue -u ${USER} -j ${job_id} --noheader" fi # Customize wrapper scripts if [[ "${SRW_PLATFORM}" == gaea ]]; then sed -i '15i #SBATCH --clusters=c5' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh sed -i 's|qos=batch|qos=normal|g' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh + sed -i 's|${JOBSdir}/JREGIONAL_RUN_POST|$USHdir/load_modules_run_task.sh "run_post" ${JOBSdir}/JREGIONAL_RUN_POST|g' ${WORKSPACE}/${SRW_PLATFORM}/ush/wrappers/run_post.sh fi if [[ "${SRW_PLATFORM}" == hera ]]; then @@ -52,6 +51,11 @@ sleep 10 # Check for job and exit when done while true do + if [[ "${SRW_PLATFORM}" == derecho ]]; then + check_job="qstat -u ${USER} -r ${job_id}" + else + check_job="squeue -u ${USER} -j ${job_id} --noheader" + fi job_id_info=$($check_job) if [ ! -z "$job_id_info" ]; then echo "Job is still running. Check again in two minutes" diff --git a/Externals.cfg b/Externals.cfg index 49ea5ffc38..9ed03cd285 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -12,7 +12,7 @@ protocol = git repo_url = https://github.com/ufs-community/ufs-weather-model # Specify either a branch name or a hash but not both. 
#branch = develop -hash = 8518c2c +hash = 1411b90 local_path = sorc/ufs-weather-model required = True diff --git a/modulefiles/build_hera_gnu.lua b/modulefiles/build_hera_gnu.lua index 5355895da9..7defa36bbf 100644 --- a/modulefiles/build_hera_gnu.lua +++ b/modulefiles/build_hera_gnu.lua @@ -19,7 +19,7 @@ load(pathJoin("nccmp", os.getenv("nccmp_ver") or "1.9.0.1")) load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6")) load(pathJoin("openblas", os.getenv("openblas_ver") or "0.3.19")) -setenv("CMAKE_C_COMPILER","mpicc") -setenv("CMAKE_CXX_COMPILER","mpicxx") -setenv("CMAKE_Fortran_COMPILER","mpif90") -setenv("CMAKE_Platform","hera.gnu") +setenv("CC", "mpicc") +setenv("CXX", "mpic++") +setenv("FC", "mpif90") +setenv("CMAKE_Platform", "hera.gnu") diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 47430a802d..0deac84d49 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -258,12 +258,9 @@ if [[ ${suites[@]} =~ "${CCPP_PHYS_SUITE}" ]] ; then grid_fp_gwd="${FIXlam}/${grid_fn_gwd}" ls_fn="geo_em.d01.lat-lon.2.5m.HGT_M.nc" ss_fn="HGT.Beljaars_filtered.lat-lon.30s_res.nc" - create_symlink_to_file target="${grid_fp_gwd}" symlink="${DATA}/${grid_fn_gwd}" \ - relative="TRUE" - create_symlink_to_file target="${FIXam}/${ls_fn}" symlink="${DATA}/${ls_fn}" \ - relative="TRUE" - create_symlink_to_file target="${FIXam}/${ss_fn}" symlink="${DATA}/${ss_fn}" \ - relative="TRUE" + create_symlink_to_file ${grid_fp_gwd} ${DATA}/${grid_fn_gwd} TRUE + create_symlink_to_file ${FIXam}/${ls_fn} ${DATA}/${ls_fn} TRUE + create_symlink_to_file ${FIXam}/${ss_fn} ${DATA}/${ss_fn} TRUE input_redirect_fn="grid_info.dat" cat > "${input_redirect_fn}" < /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# # Specify the set of valid argument names for this script/function. Then # process the arguments provided to this script/function (which should # consist of a set of name-value pairs of the form arg1="value1", etc). # #----------------------------------------------------------------------- # - local valid_args=( \ -"target" \ -"symlink" \ -"relative" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. -# -#----------------------------------------------------------------------- -# - print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Verify that the required arguments to this function have been specified. -# If not, print out an error message and exit. 
-# -#----------------------------------------------------------------------- -# - if [ -z "${target}" ]; then - print_err_msg_exit "\ -The argument \"target\" specifying the target of the symbolic link that -this function will create was not specified in the call to this function: - target = \"$target\"" - fi +if [[ $# -lt 2 ]]; then + usage + print_err_msg_exit "Function create_symlink_to_file() requires at least two arguments" +fi - if [ -z "${symlink}" ]; then - print_err_msg_exit "\ -The argument \"symlink\" specifying the symbolic link that this function -will create was not specified in the call to this function: - symlink = \"$symlink\"" - fi +target=$1 +symlink=$2 +relative=${3:-TRUE} # #----------------------------------------------------------------------- # @@ -106,8 +48,6 @@ will create was not specified in the call to this function: # #----------------------------------------------------------------------- # - relative=${relative:-"TRUE"} - valid_vals_relative=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") check_var_valid_value "relative" "valid_vals_relative" # @@ -148,16 +88,7 @@ not exist or is not a file: # #----------------------------------------------------------------------- # - ln_vrfy -sf ${relative_flag} "$target" "$symlink" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 +ln -sf ${relative_flag} "$target" "$symlink" } From 744bf17108b4ef7c48f60c819f941393f9bed1a2 Mon Sep 17 00:00:00 2001 From: RatkoVasic-NOAA <37597874+RatkoVasic-NOAA@users.noreply.github.com> Date: Tue, 23 Apr 2024 14:05:38 -0400 Subject: [PATCH 18/42] [develop] Update nco version (#1077) Hera with the Intel compiler was using the system-installed nco library (version 4.9.3). This went unnoticed until the system administrators removed read permissions from the 4.9.3 installation and installed a new version (5.1.6). SRW will now use the spack-stack-installed nco (version 5.0.6), like all other machines/compilers. --- modulefiles/build_hera_intel.lua | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modulefiles/build_hera_intel.lua b/modulefiles/build_hera_intel.lua index ee11e4a386..72a90d9f47 100644 --- a/modulefiles/build_hera_intel.lua +++ b/modulefiles/build_hera_intel.lua @@ -26,7 +26,7 @@ load(pathJoin("cmake", cmake_ver)) load("srw_common") load(pathJoin("nccmp", os.getenv("nccmp_ver") or "1.9.0.1")) -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) +load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6")) load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2")) setenv("CMAKE_C_COMPILER","mpiicc") From 527b242748ebe1d8924470388061a2810b69e1fb Mon Sep 17 00:00:00 2001 From: Bruce Kropp - Raytheon <104453151+BruceKropp-Raytheon@users.noreply.github.com> Date: Thu, 25 Apr 2024 09:43:40 -0700 Subject: [PATCH 19/42] [develop] Feature cicd scorecard metric (#1079) * Update CI/CD scripts to include skill-score metric output so that the follow-on metrics collection can display it on the metrics dashboard. * Update Jenkinsfile to fix the post() section that calls the follow-on metrics collection job so that it is called only once at the end, regardless of whether any platform's builds or tests fail independently. * Update the Jenkinsfile to skip platform Nodes that appear to be offline, rather than putting them in the launch queue.
This also means we can re-add the NOAAcloud platforms to the list of possible Nodes to attempt. They will be skipped if they are not online. * Update Jenkinsfile to include timeout limits on the Build and Test stages so they don't run forever. * Update Jenkinsfile to enable timestamps in the Jenkins console log. --------- Co-authored-by: EdwardSnyder-NOAA --- .cicd/Jenkinsfile | 49 ++++++++++++------- .../{srw_metric_example.sh => srw_metric.sh} | 20 ++++---- .cicd/scripts/srw_test.sh | 15 +++--- 3 files changed, 50 insertions(+), 34 deletions(-) rename .cicd/scripts/{srw_metric_example.sh => srw_metric.sh} (87%) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index ea87029408..1c92a1bd65 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -5,6 +5,8 @@ pipeline { disableConcurrentBuilds() overrideIndexTriggers(false) skipDefaultCheckout(true) + timestamps() + timeout(time: 12, unit: 'HOURS') } parameters { @@ -74,6 +76,11 @@ pipeline { // Run on all platform/compiler combinations by default or build and test only on the platform(s) and // compiler(s) specified by SRW_PLATFORM_FILTER and SRW_COMPILER_FILTER when { + beforeAgent true + expression { + return nodesByLabel(env.SRW_PLATFORM).size() > 0 + } + allOf { anyOf { expression { params.SRW_PLATFORM_FILTER == 'all' } @@ -137,6 +144,7 @@ pipeline { sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"' } } + post { always { s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] @@ -147,6 +155,10 @@ pipeline { // Run the unified build script; if successful create a tarball of the build and upload to S3 stage('Build') { + options { + timeout(time: 4, unit: 'HOURS') + } + steps { dir ("${env.SRW_PLATFORM}") { echo "${env.STAGE_NAME} SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" @@ -183,6 +195,7 @@ pipeline { environment { TASK_DEPTH = "${env.SRW_WRAPPER_TASK_DEPTH}" } + steps { dir ("${env.SRW_PLATFORM}") { echo "Running ${TASK_DEPTH} simple workflow script task tests on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})" @@ -193,6 +206,10 @@ pipeline { // Run the unified test script stage('Test') { + options { + timeout(time: 8, unit: 'HOURS') + } + environment { SRW_WE2E_EXPERIMENT_BASE_DIR = "${env.WORKSPACE}/${env.SRW_PLATFORM}/expt_dirs" } @@ -228,25 +245,13 @@ pipeline { post { success { s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever:
managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*-skill-score.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } always { s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] // Remove the data sets from the experiments directory to conserve disk space sh 'find "${SRW_WE2E_EXPERIMENT_BASE_DIR}" -regextype posix-extended -regex "^.*(orog|[0-9]{10})$" -type d | xargs rm -rf' - } - } - } - - stage('Metrics') { - steps { - script { - CI_BRANCH_NAME=env.JOB_BASE_NAME.replace("%2F","%252F") - echo "Triggering job for branch ${CI_BRANCH_NAME}/${env.BUILD_NUMBER} ..." - build job: '/ufs-srweather-app/ufs-srw-metrics', parameters: [ - string(name: 'CI_JOB_NAME', value: "ufs-srweather-app/metrics"), - string(name: 'CI_BUILD_NUMBER', value: "${CI_BRANCH_NAME}/${env.BUILD_NUMBER}") - ], wait: false } } } @@ -254,13 +259,23 @@ pipeline { } } } + // end of stages{} - // Uncomment the following block to re-enable PW clusters - /* post { always { - // Stop any Parallel Works clusters that were started during the pipeline execution script { + // Trigger another job to collect all build statistics + CI_JOB_NAME=env.JOB_NAME.replace("/${env.JOB_BASE_NAME}","") + CI_BRANCH_NAME=env.JOB_BASE_NAME.replace("%2F","%252F") + echo "post: Triggering ufs-srweather-app/ufs-srw-metrics job for ${CI_JOB_NAME} on branch build ${CI_BRANCH_NAME}/${env.BUILD_NUMBER} ..." 
+ build job: '/ufs-srweather-app/ufs-srw-metrics', parameters: [ + string(name: 'CI_JOB_NAME', value: "${CI_JOB_NAME}"), + string(name: 'CI_BUILD_NUMBER', value: "${CI_BRANCH_NAME}/${env.BUILD_NUMBER}") + ], wait: false + + // Uncomment the following block to re-enable PW clusters + /* + // Stop any Parallel Works clusters that were started during the pipeline execution // def pw_clusters = ['pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'] def pw_clusters = ['pclusternoaav2use1'] def clusters = [] @@ -279,8 +294,8 @@ pipeline { // PW_CLUSTER_NAME parameter build job: 'parallel-works-jenkins-client/stop-cluster', parameters: [string(name: 'PW_CLUSTER_NAME', value: clusters[i])] } + */ } } } - */ } diff --git a/.cicd/scripts/srw_metric_example.sh b/.cicd/scripts/srw_metric.sh similarity index 87% rename from .cicd/scripts/srw_metric_example.sh rename to .cicd/scripts/srw_metric.sh index 45dd30c299..cbb216c959 100755 --- a/.cicd/scripts/srw_metric_example.sh +++ b/.cicd/scripts/srw_metric.sh @@ -56,17 +56,17 @@ else fi # Test directories -we2e_experiment_base_dir="${workspace}/../expt_dirs/metric_test" -we2e_test_dir="${workspace}/tests/WE2E" -we2e_test_name="grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0" +we2e_experiment_base_dir="${we2e_experiment_base_dir:=${workspace}/../expt_dirs/metric_test}" +we2e_test_dir="${we2e_test_dir:=${workspace}/tests/WE2E}" +we2e_test_name="${test_type:=grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0}" pwd # Setup the build environment declare srw_compiler srw_compiler=${SRW_COMPILER} -source etc/lmod-setup.sh ${platform,,} -module use modulefiles +source ${workspace}/etc/lmod-setup.sh ${platform,,} +module use ${workspace}/modulefiles module load build_${platform,,}_${srw_compiler} # Build srw @@ -99,7 +99,7 @@ if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then rm -rf ${workspace}/Indy-Severe-Weather/ # Check if metprd data exists locally otherwise get it from S3 TEST_EXTRN_MDL_SOURCE_BASEDIR=$(grep TEST_EXTRN_MDL_SOURCE_BASEDIR ${workspace}/ush/machine/${SRW_PLATFORM}.yaml | awk '{print $NF}') - if [[ ! -d $(dirname ${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat ]] ; then + if [[ -d $(dirname ${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat ]] ; then mkdir -p Indy-Severe-Weather/metprd/point_stat cp -rp $(dirname ${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat Indy-Severe-Weather/metprd elif [[ -f Indy-Severe-Weather.tgz ]]; then @@ -108,7 +108,7 @@ if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then wget https://noaa-ufs-srw-pds.s3.amazonaws.com/sample_cases/release-public-v2.1.0/Indy-Severe-Weather.tgz tar xvfz Indy-Severe-Weather.tgz fi - [[ -f skill-score.txt ]] && rm skill-score.txt + [[ -f ${platform,,}-${srw_compiler}-skill-score.txt ]] && rm ${platform,,}-${srw_compiler}-skill-score.txt # Skill score index is computed over several terms that are defined in parm/metplus/STATAnalysisConfig_skill_score. # It is computed by aggregating the output from earlier runs of the Point-Stat and/or Grid-Stat tools over one or more cases. 
# In this example, skill score index is a weighted average of 4 skill scores of RMSE statistics for wind speed, dew point temperature, @@ -126,15 +126,15 @@ if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then sed -i 's|--load("conda")|load("conda")|g' ${workspace}/modulefiles/tasks/${platform,,}/run_vx.local.lua fi # Run stat_analysis - stat_analysis -config parm/metplus/STATAnalysisConfig_skill_score -lookin ${workspace}/Indy-Severe-Weather/metprd/point_stat -v 2 -out skill-score.txt + stat_analysis -config parm/metplus/STATAnalysisConfig_skill_score -lookin ${workspace}/Indy-Severe-Weather/metprd/point_stat -v 2 -out ${platform,,}-${srw_compiler}-skill-score.txt # check skill-score.txt - cat skill-score.txt + cat ${platform,,}-${srw_compiler}-skill-score.txt # get skill-score (SS_INDEX) and check if it is significantly smaller than 1.0 # A value greater than 1.0 indicates that the forecast model outperforms the reference, # while a value less than 1.0 indicates that the reference outperforms the forecast. - tmp_string=$( tail -2 skill-score.txt | head -1 ) + tmp_string=$( tail -2 ${platform,,}-${srw_compiler}-skill-score.txt | head -1 ) SS_INDEX=$(echo $tmp_string | awk -F " " '{print $NF}') echo "Skill Score: ${SS_INDEX}" if [[ ${SS_INDEX} < "0.700" ]]; then diff --git a/.cicd/scripts/srw_test.sh b/.cicd/scripts/srw_test.sh index 49db0945a9..8ed4756987 100755 --- a/.cicd/scripts/srw_test.sh +++ b/.cicd/scripts/srw_test.sh @@ -11,7 +11,7 @@ script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd) # Get repository root from Jenkins WORKSPACE variable if set, otherwise, set # relative to script directory. declare workspace -if [[ -n "${WORKSPACE}/${SRW_PLATFORM}" ]]; then +if [[ -d "${WORKSPACE}/${SRW_PLATFORM}" ]]; then workspace="${WORKSPACE}/${SRW_PLATFORM}" else workspace="$(cd -- "${script_dir}/../.." && pwd)" @@ -26,8 +26,8 @@ else fi # Test directories -we2e_experiment_base_dir="${workspace}/expt_dirs" -we2e_test_dir="${workspace}/tests/WE2E" +export we2e_experiment_base_dir="${workspace}/expt_dirs" +export we2e_test_dir="${workspace}/tests/WE2E" # Clean any stale test logs rm -f ${workspace}/tests/WE2E/log.* @@ -35,11 +35,11 @@ rm -f ${we2e_experiment_base_dir}/*/log.generate_FV3LAM_wflow ${we2e_experiment_ # Run the end-to-end tests. 
if "${SRW_WE2E_COMPREHENSIVE_TESTS}"; then - test_type="comprehensive" + export test_type="comprehensive" else - test_type=${SRW_WE2E_SINGLE_TEST:-"coverage"} + export test_type=${SRW_WE2E_SINGLE_TEST:-"coverage"} if [[ "${test_type}" = skill-score ]]; then - test_type="grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0" + export test_type="grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0" fi fi @@ -48,7 +48,8 @@ cd ${we2e_test_dir} progress_file="${workspace}/we2e_test_results-${platform}-${SRW_COMPILER}.txt" /usr/bin/time -p -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' -o ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_test.json \ ./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \ - --expt_basedir=${we2e_experiment_base_dir} | tee ${progress_file} + --expt_basedir=${we2e_experiment_base_dir} | tee ${progress_file}; \ + [[ -f ${we2e_experiment_base_dir}/grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0/log.generate_FV3LAM_wflow ]] && ${workspace}/.cicd/scripts/srw_metric.sh run_stat_anly # Set exit code to number of failures set +e From 08537b0969d0a2aec9f1ecaf8d0d57afa7fa3d0b Mon Sep 17 00:00:00 2001 From: Emily Carpenter <137525341+elcarpenterNOAA@users.noreply.github.com> Date: Fri, 26 Apr 2024 10:09:16 -0400 Subject: [PATCH 20/42] [develop] Replace existing UW CLI with UW API calls to template (#1078) This work continues the integration of the uwtools package by replacing current use of the UW CLI with UW API calls in Python scripts. These changes are limited to the UW template tool. 
--- environment.yml | 2 +- ush/create_aqm_rc_file.py | 38 +++++------------------------- ush/create_diag_table_file.py | 32 ++++--------------------- ush/create_model_configure_file.py | 32 ++++--------------------- ush/create_ufs_configure_file.py | 37 +++++------------------------ ush/generate_FV3LAM_wflow.py | 30 +++++------------------ 6 files changed, 29 insertions(+), 142 deletions(-) diff --git a/environment.yml b/environment.yml index faeb19d466..e2dd6b8300 100644 --- a/environment.yml +++ b/environment.yml @@ -5,4 +5,4 @@ channels: dependencies: - pylint=2.17* - pytest=7.2* - - uwtools=2.1* + - uwtools=2.2* diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py index 726e8eb0f3..739a4d9f18 100644 --- a/ush/create_aqm_rc_file.py +++ b/ush/create_aqm_rc_file.py @@ -6,9 +6,8 @@ import argparse import os import sys -import tempfile -from subprocess import STDOUT, CalledProcessError, check_output from textwrap import dedent +from uwtools.api.template import render from python_utils import ( cfg_to_yaml_str, @@ -124,36 +123,11 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): # #----------------------------------------------------------------------- # - with tempfile.NamedTemporaryFile( - dir="./", - mode="w+t", - prefix="aqm_rc_settings", - suffix=".yaml") as tmpfile: - tmpfile.write(settings_str) - tmpfile.seek(0) - cmd = " ".join(["uw template render", - "-i", - AQM_RC_TMPL_FP, - "-o", - aqm_rc_fp, - "-v", - "--values-file", - tmpfile.name, - ] - ) - indent = " " - output = "" - try: - output = check_output(cmd, encoding="utf=8", shell=True, - stderr=STDOUT, text=True) - except CalledProcessError as e: - output = e.output - print(f"Failed with status: {e.returncode}") - sys.exit(1) - finally: - print("Output:") - for line in output.split("\n"): - print(f"{indent * 2}{line}") + render( + input_file = AQM_RC_TMPL_FP, + output_file = aqm_rc_fp, + values_src = settings, + ) return True def parse_args(argv): diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py index 40f5e0deee..975165dfe5 100644 --- a/ush/create_diag_table_file.py +++ b/ush/create_diag_table_file.py @@ -7,9 +7,8 @@ import argparse import os import sys -import tempfile -from subprocess import STDOUT, CalledProcessError, check_output from textwrap import dedent +from uwtools.api.template import render from python_utils import ( cfg_to_yaml_str, @@ -74,32 +73,11 @@ def create_diag_table_file(run_dir): verbose=VERBOSE, ) - with tempfile.NamedTemporaryFile(dir="./", - mode="w+t", - prefix="aqm_rc_settings", - suffix=".yaml") as tmpfile: - tmpfile.write(settings_str) - tmpfile.seek(0) - cmd = " ".join(["uw template render", - "-i", DIAG_TABLE_TMPL_FP, - "-o", diag_table_fp, - "-v", - "--values-file", tmpfile.name, - ] + render( + input_file = DIAG_TABLE_TMPL_FP, + output_file = diag_table_fp, + values_src = settings, ) - indent = " " - output = "" - try: - output = check_output(cmd, encoding="utf=8", shell=True, - stderr=STDOUT, text=True) - except CalledProcessError as e: - output = e.output - print(f"Failed with status: {e.returncode}") - sys.exit(1) - finally: - print("Output:") - for line in output.split("\n"): - print(f"{indent * 2}{line}") return True diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index cd10ac404e..cd39087688 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -6,9 +6,8 @@ import argparse import os import sys -import tempfile from textwrap import dedent -from subprocess import 
STDOUT, CalledProcessError, check_output +from uwtools.api.template import render from python_utils import ( cfg_to_yaml_str, @@ -220,32 +219,11 @@ def create_model_configure_file( # model_config_fp = os.path.join(run_dir, MODEL_CONFIG_FN) - with tempfile.NamedTemporaryFile(dir="./", - mode="w+t", - suffix=".yaml", - prefix="model_config_settings.") as tmpfile: - tmpfile.write(settings_str) - tmpfile.seek(0) - cmd = " ".join(["uw template render", - "-i", MODEL_CONFIG_TMPL_FP, - "-o", model_config_fp, - "-v", - "--values-file", tmpfile.name, - ] + render( + input_file = MODEL_CONFIG_TMPL_FP, + output_file = model_config_fp, + values_src = settings ) - indent = " " - output = "" - try: - output = check_output(cmd, encoding="utf=8", shell=True, - stderr=STDOUT, text=True) - except CalledProcessError as e: - output = e.output - print(f"Failed with status: {e.returncode}") - sys.exit(1) - finally: - print("Output:") - for line in output.split("\n"): - print(f"{indent * 2}{line}") return True diff --git a/ush/create_ufs_configure_file.py b/ush/create_ufs_configure_file.py index 03de3e24c7..9d4ea8afa4 100644 --- a/ush/create_ufs_configure_file.py +++ b/ush/create_ufs_configure_file.py @@ -8,9 +8,8 @@ import argparse import os import sys -import tempfile -from subprocess import STDOUT, CalledProcessError, check_output from textwrap import dedent +from uwtools.api.template import render from python_utils import ( cfg_to_yaml_str, @@ -46,7 +45,7 @@ def create_ufs_configure_file(run_dir): #----------------------------------------------------------------------- # print_info_msg(f''' - Creating a ufs.configure file (\"{UFS_CONFIG_FN}\") in the specified + Creating a ufs.configure file (\"{UFS_CONFIG_FN}\") in the specified run directory (run_dir): run_dir = \"{run_dir}\"''', verbose=VERBOSE) # @@ -87,35 +86,11 @@ def create_ufs_configure_file(run_dir): # #----------------------------------------------------------------------- # - # Store the settings in a temporary file - with tempfile.NamedTemporaryFile(dir="./", - mode="w+t", - prefix="ufs_config_settings", - suffix=".yaml") as tmpfile: - tmpfile.write(settings_str) - tmpfile.seek(0) - - cmd = " ".join(["uw template render", - "-i", UFS_CONFIG_TMPL_FP, - "-o", ufs_config_fp, - "-v", - "--values-file", tmpfile.name, - ] + render( + input_file = UFS_CONFIG_TMPL_FP, + output_file = ufs_config_fp, + values_src = settings, ) - - indent = " " - output = "" - try: - output = check_output(cmd, encoding="utf=8", shell=True, - stderr=STDOUT, text=True) - except CalledProcessError as e: - output = e.output - print(f"Failed with status: {e.returncode}") - sys.exit(1) - finally: - print("Output:") - for line in output.split("\n"): - print(f"{indent * 2}{line}") return True def parse_args(argv): diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index ec2b95c3f3..ba0e9f3a2b 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -11,10 +11,10 @@ import logging import os import sys -from subprocess import STDOUT, CalledProcessError, check_output from textwrap import dedent from uwtools.api.config import get_nml_config, get_yaml_config, realize +from uwtools.api.template import render from python_utils import ( log_info, @@ -112,29 +112,11 @@ def generate_FV3LAM_wflow( # Call the python script to generate the experiment's XML file # rocoto_yaml_fp = expt_config["workflow"]["ROCOTO_YAML_FP"] - cmd = " ".join(["uw template render", - "-i", template_xml_fp, - "-o", wflow_xml_fp, - "-v", - "--values-file", rocoto_yaml_fp, - 
] - ) - - indent = " " - output = "" - logfunc = logging.info - try: - output = check_output(cmd, encoding="utf=8", shell=True, - stderr=STDOUT, text=True) - except CalledProcessError as e: - logfunc = logging.error - output = e.output - logging.exception(("Failed with status: %s", e.returncode)) - raise - finally: - logfunc("Output:") - for line in output.split("\n"): - logfunc("%s%s", indent * 2, line) + render( + input_file = template_xml_fp, + output_file = wflow_xml_fp, + values_src = rocoto_yaml_fp, + ) # # ----------------------------------------------------------------------- # From eea4c29e8ffea4daa487a675fc70d22668414cc7 Mon Sep 17 00:00:00 2001 From: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com> Date: Tue, 30 Apr 2024 08:53:57 -0400 Subject: [PATCH 21/42] [develop] Update weather model hash and remove "_vrfy" from bash commands (#1074) The weather model hash has been updated to 4f32a4b (April 15). Additionally, _vrfy has been removed from the cd, cp, ln, mkdir, mv, and rm bash commands in jobs, scripts, ush, and ush/bash_utils. The modified commands don't function as intended (issue #861) and aren't accepted by NCO (issue #1021). --- .cicd/scripts/srw_metric.sh | 2 +- Externals.cfg | 2 +- jobs/JREGIONAL_CHECK_POST_OUTPUT | 2 +- jobs/JREGIONAL_GET_EXTRN_MDL_FILES | 4 +- jobs/JREGIONAL_MAKE_GRID | 4 +- jobs/JREGIONAL_MAKE_ICS | 6 +- jobs/JREGIONAL_MAKE_LBCS | 6 +- jobs/JREGIONAL_MAKE_SFC_CLIMO | 4 +- jobs/JREGIONAL_RUN_FCST | 4 +- jobs/JREGIONAL_RUN_POST | 6 +- jobs/JREGIONAL_RUN_PRDGEN | 6 +- scripts/exregional_make_grid.sh | 16 +- scripts/exregional_make_ics.sh | 14 +- scripts/exregional_make_lbcs.sh | 2 +- scripts/exregional_make_orog.sh | 48 +-- scripts/exregional_make_sfc_climo.sh | 8 +- scripts/exregional_run_fcst.sh | 58 ++-- ...onal_run_met_genensprod_or_ensemblestat.sh | 2 +- ...gional_run_met_gridstat_or_pointstat_vx.sh | 2 +- ...un_met_gridstat_or_pointstat_vx_ensmean.sh | 2 +- ...un_met_gridstat_or_pointstat_vx_ensprob.sh | 2 +- scripts/exregional_run_met_pb2nc_obs.sh | 2 +- scripts/exregional_run_met_pcpcombine.sh | 2 +- scripts/exregional_run_post.sh | 32 +- scripts/exregional_run_prdgen.sh | 18 +- ush/bash_utils/check_for_preexist_dir_file.sh | 4 +- ush/bash_utils/create_symlink_to_file.sh | 6 - ush/bash_utils/filesys_cmds_vrfy.sh | 280 ------------------ ush/get_mrms_files.sh | 2 +- ush/job_preamble.sh | 6 +- ush/launch_FV3LAM_wflow.sh | 2 +- ush/source_util_funcs.sh | 10 - 32 files changed, 134 insertions(+), 430 deletions(-) delete mode 100644 ush/bash_utils/filesys_cmds_vrfy.sh diff --git a/.cicd/scripts/srw_metric.sh b/.cicd/scripts/srw_metric.sh index cbb216c959..e645a2c916 100755 --- a/.cicd/scripts/srw_metric.sh +++ b/.cicd/scripts/srw_metric.sh @@ -58,7 +58,7 @@ fi # Test directories we2e_experiment_base_dir="${we2e_experiment_base_dir:=${workspace}/../expt_dirs/metric_test}" we2e_test_dir="${we2e_test_dir:=${workspace}/tests/WE2E}" -we2e_test_name="${test_type:=grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0}" +we2e_test_name="grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0" pwd diff --git a/Externals.cfg b/Externals.cfg index 9ed03cd285..c76f7d8845 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -12,7 +12,7 @@ protocol = git repo_url = https://github.com/ufs-community/ufs-weather-model # Specify either a branch name or a hash but not both. 
#branch = develop -hash = 1411b90 +hash = 4f32a4b local_path = sorc/ufs-weather-model required = True diff --git a/jobs/JREGIONAL_CHECK_POST_OUTPUT b/jobs/JREGIONAL_CHECK_POST_OUTPUT index 2b1fe69bbb..a6403ebe1f 100755 --- a/jobs/JREGIONAL_CHECK_POST_OUTPUT +++ b/jobs/JREGIONAL_CHECK_POST_OUTPUT @@ -78,7 +78,7 @@ Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." # ensmem_name="mem${ENSMEM_INDX}" cycle_dir="$EXPTDIR/$CDATE" -mkdir_vrfy -p "${cycle_dir}" +mkdir -p "${cycle_dir}" touch "${cycle_dir}/post_files_exist_${ensmem_name}.txt" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES index 8efd332dd9..80366f0ddc 100755 --- a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES +++ b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES @@ -222,8 +222,8 @@ if [ $RUN_ENVIR = "nco" ]; then export EXTRN_MDL_STAGING_DIR="${EXTRN_MDL_STAGING_DIR:-${DATA}}" else export EXTRN_MDL_STAGING_DIR="${COMIN}/${EXTRN_MDL_NAME}/for_${ICS_OR_LBCS}" - mkdir_vrfy -p "${EXTRN_MDL_STAGING_DIR}" - cd_vrfy "${EXTRN_MDL_STAGING_DIR}" + mkdir -p "${EXTRN_MDL_STAGING_DIR}" + cd "${EXTRN_MDL_STAGING_DIR}" fi # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID index 844d782bc7..8d65540d1c 100755 --- a/jobs/JREGIONAL_MAKE_GRID +++ b/jobs/JREGIONAL_MAKE_GRID @@ -153,7 +153,7 @@ This is the J-job script for the task that generates grid files. #----------------------------------------------------------------------- # check_for_preexist_dir_file "${GRID_DIR}" "${PREEXISTING_DIR_METHOD}" -mkdir_vrfy -p "${GRID_DIR}" +mkdir -p "${GRID_DIR}" # #----------------------------------------------------------------------- # @@ -162,7 +162,7 @@ mkdir_vrfy -p "${GRID_DIR}" #----------------------------------------------------------------------- # DATA="${DATA:-${GRID_DIR}/tmp}" -mkdir_vrfy -p "$DATA" +mkdir -p "$DATA" # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_MAKE_ICS b/jobs/JREGIONAL_MAKE_ICS index 70306c0a87..c4fb429f1b 100755 --- a/jobs/JREGIONAL_MAKE_ICS +++ b/jobs/JREGIONAL_MAKE_ICS @@ -60,7 +60,7 @@ if [ $RUN_ENVIR = "nco" ]; then else export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT" fi -mkdir_vrfy -p "${INPUT_DATA}" +mkdir -p "${INPUT_DATA}" # # #----------------------------------------------------------------------- @@ -72,8 +72,8 @@ mkdir_vrfy -p "${INPUT_DATA}" if [ $RUN_ENVIR = "community" ]; then DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_MAKE_ICS}" check_for_preexist_dir_file "$DATA" "delete" - mkdir_vrfy -p $DATA - cd_vrfy $DATA + mkdir -p $DATA + cd $DATA fi # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_LBCS b/jobs/JREGIONAL_MAKE_LBCS index 16ac382fee..81e2578fd4 100755 --- a/jobs/JREGIONAL_MAKE_LBCS +++ b/jobs/JREGIONAL_MAKE_LBCS @@ -60,7 +60,7 @@ if [ $RUN_ENVIR = "nco" ]; then else export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT" fi -mkdir_vrfy -p "${INPUT_DATA}" +mkdir -p "${INPUT_DATA}" # #----------------------------------------------------------------------- # @@ -71,8 +71,8 @@ mkdir_vrfy -p "${INPUT_DATA}" if [ "${RUN_ENVIR}" = "community" ]; then DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_MAKE_LBCS}" check_for_preexist_dir_file "$DATA" "delete" - mkdir_vrfy -p $DATA - cd_vrfy $DATA + mkdir -p $DATA + cd $DATA fi # #----------------------------------------------------------------------- 
diff --git a/jobs/JREGIONAL_MAKE_SFC_CLIMO b/jobs/JREGIONAL_MAKE_SFC_CLIMO index eee25b193a..7cbd0cc23e 100755 --- a/jobs/JREGIONAL_MAKE_SFC_CLIMO +++ b/jobs/JREGIONAL_MAKE_SFC_CLIMO @@ -55,7 +55,7 @@ climatology. #----------------------------------------------------------------------- # check_for_preexist_dir_file "${SFC_CLIMO_DIR}" "${PREEXISTING_DIR_METHOD}" -mkdir_vrfy -p "${SFC_CLIMO_DIR}" +mkdir -p "${SFC_CLIMO_DIR}" # #----------------------------------------------------------------------- # @@ -66,7 +66,7 @@ mkdir_vrfy -p "${SFC_CLIMO_DIR}" DATA="${DATA:-${SFC_CLIMO_DIR}/tmp}" if [ $RUN_ENVIR != "nco" ]; then check_for_preexist_dir_file "$DATA" "delete" - mkdir_vrfy $DATA + mkdir $DATA fi # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_FCST b/jobs/JREGIONAL_RUN_FCST index a7f7c96031..45f826c0d7 100755 --- a/jobs/JREGIONAL_RUN_FCST +++ b/jobs/JREGIONAL_RUN_FCST @@ -76,8 +76,8 @@ fi # #----------------------------------------------------------------------- # -mkdir_vrfy -p ${DATA}/INPUT -mkdir_vrfy -p ${DATA}/RESTART +mkdir -p ${DATA}/INPUT +mkdir -p ${DATA}/RESTART # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST index 97b100967c..692b3ae65d 100755 --- a/jobs/JREGIONAL_RUN_POST +++ b/jobs/JREGIONAL_RUN_POST @@ -85,7 +85,7 @@ fi if [ "${RUN_ENVIR}" = "community" ]; then DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" export COMOUT="${DATA}/postprd" - mkdir_vrfy -p "${COMOUT}" + mkdir -p "${COMOUT}" fi if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then @@ -94,9 +94,9 @@ else export DATA_FHR="${DATA:-$COMOUT}/$fhr" fi check_for_preexist_dir_file "${DATA_FHR}" "delete" -mkdir_vrfy -p "${DATA_FHR}" +mkdir -p "${DATA_FHR}" -cd_vrfy "${DATA_FHR}" +cd "${DATA_FHR}" # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_RUN_PRDGEN b/jobs/JREGIONAL_RUN_PRDGEN index 2d30ced9db..24479cb62d 100755 --- a/jobs/JREGIONAL_RUN_PRDGEN +++ b/jobs/JREGIONAL_RUN_PRDGEN @@ -84,7 +84,7 @@ DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" if [ "${RUN_ENVIR}" = "community" ]; then export COMOUT="${DATA}/postprd" fi -mkdir_vrfy -p "${COMOUT}" +mkdir -p "${COMOUT}" # subhourly post if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then @@ -93,9 +93,9 @@ else export DATA_FHR="${DATA:-$COMOUT}/$fhr" fi check_for_preexist_dir_file "${DATA_FHR}" "delete" -mkdir_vrfy -p "${DATA_FHR}" +mkdir -p "${DATA_FHR}" -cd_vrfy "${DATA_FHR}" +cd "${DATA_FHR}" # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 1f95ea8f91..c1876651d8 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -196,7 +196,7 @@ fi # # Change location to the temporary (work) directory. # -cd_vrfy "$DATA" +cd "$DATA" print_info_msg "$VERBOSE" " Starting grid file generation..." @@ -313,7 +313,7 @@ fi # to the original directory. # grid_fp="$DATA/${grid_fn}" -cd_vrfy - +cd - print_info_msg "$VERBOSE" " Grid file generation completed successfully." 
@@ -392,7 +392,7 @@ set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "'$CRES'" grid_fp_orig="${grid_fp}" grid_fn="${CRES}${DOT_OR_USCORE}grid.tile${TILE_RGNL}.halo${NHW}.nc" grid_fp="${GRID_DIR}/${grid_fn}" -mv_vrfy "${grid_fp_orig}" "${grid_fp}" +mv "${grid_fp_orig}" "${grid_fp}" # #----------------------------------------------------------------------- # @@ -449,7 +449,7 @@ unshaved_fp="${grid_fp}" # Once it is complete, we will move the resultant file from DATA to # GRID_DIR. # -cd_vrfy "$DATA" +cd "$DATA" # # Create an input namelist file for the shave executable to generate a # grid file with a 3-cell-wide halo from the one with a wide halo. Then @@ -477,7 +477,7 @@ The namelist file (nml_fn) used in this call is in directory DATA: nml_fn = \"${nml_fn}\" DATA = \"${DATA}\"" POST_STEP -mv_vrfy ${shaved_fp} ${GRID_DIR} +mv ${shaved_fp} ${GRID_DIR} # # Create an input namelist file for the shave executable to generate a # grid file with a 4-cell-wide halo from the one with a wide halo. Then @@ -505,7 +505,7 @@ The namelist file (nml_fn) used in this call is in directory DATA: nml_fn = \"${nml_fn}\" DATA = \"${DATA}\"" POST_STEP -mv_vrfy ${shaved_fp} ${GRID_DIR} +mv ${shaved_fp} ${GRID_DIR} # # Create an input namelist file for the shave executable to generate a # grid file without halo from the one with a wide halo. Then @@ -532,11 +532,11 @@ The namelist file (nml_fn) used in this call is in directory DATA: nml_fn = \"${nml_fn}\" DATA = \"${DATA}\"" POST_STEP -mv_vrfy ${shaved_fp} ${GRID_DIR} +mv ${shaved_fp} ${GRID_DIR} # # Change location to the original directory. # -cd_vrfy - +cd - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index 84d73696eb..875249b107 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -655,10 +655,10 @@ if [ "${CPL_AQM}" = "TRUE" ]; then cp -p gfs_ctrl.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" cp -p gfs.bndy.nc "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc" else - mv_vrfy out.atm.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc - mv_vrfy out.sfc.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc - mv_vrfy gfs_ctrl.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc - mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc + mv out.atm.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc + mv out.sfc.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc + mv gfs_ctrl.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc + mv gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc fi # #----------------------------------------------------------------------- @@ -684,7 +684,7 @@ Please ensure that you've built this executable." print_err_msg_exit "${message_txt}" fi fi - cp_vrfy ${fvcom_exec_fp} ${INPUT_DATA}/. + cp ${fvcom_exec_fp} ${INPUT_DATA}/. fvcom_data_fp="${FVCOM_DIR}/${FVCOM_FILE}" if [ ! 
-f "${fvcom_data_fp}" ]; then message_txt="The file or path (fvcom_data_fp) does not exist: @@ -699,8 +699,8 @@ Please check the following user defined variables: fi fi - cp_vrfy ${fvcom_data_fp} ${INPUT_DATA}/fvcom.nc - cd_vrfy ${INPUT_DATA} + cp ${fvcom_data_fp} ${INPUT_DATA}/fvcom.nc + cd ${INPUT_DATA} PREP_STEP eval ${RUN_CMD_UTILS} ${fvcom_exec_fn} \ ${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc fvcom.nc ${FVCOM_WCSTART} ${fvcom_time} \ diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index ca3f6401cb..5a2d24bcea 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -562,7 +562,7 @@ located in the following directory: if [ "${CPL_AQM}" = "TRUE" ]; then cp -p gfs.bndy.nc ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc else - mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc + mv gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc fi fi diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 0deac84d49..9a3d5da7fc 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -85,16 +85,16 @@ fi #----------------------------------------------------------------------- # check_for_preexist_dir_file "${OROG_DIR}" "${PREEXISTING_DIR_METHOD}" -mkdir_vrfy -p "${OROG_DIR}" +mkdir -p "${OROG_DIR}" raw_dir="${OROG_DIR}/raw_topo" -mkdir_vrfy -p "${raw_dir}" +mkdir -p "${raw_dir}" filter_dir="${OROG_DIR}/filtered_topo" -mkdir_vrfy -p "${filter_dir}" +mkdir -p "${filter_dir}" shave_dir="${OROG_DIR}/shave_tmp" -mkdir_vrfy -p "${shave_dir}" +mkdir -p "${shave_dir}" # # #----------------------------------------------------------------------- @@ -119,15 +119,15 @@ fi # file and change location to it. # DATA="${DATA:-${raw_dir}/tmp}" -mkdir_vrfy -p "${DATA}" -cd_vrfy "${DATA}" +mkdir -p "${DATA}" +cd "${DATA}" # # Copy topography and related data files from the system directory (FIXorg) # to the temporary directory. # -cp_vrfy ${FIXorg}/thirty.second.antarctic.new.bin fort.15 -cp_vrfy ${FIXorg}/landcover30.fixed . -cp_vrfy ${FIXorg}/gmted2010.30sec.int fort.235 +cp ${FIXorg}/thirty.second.antarctic.new.bin fort.15 +cp ${FIXorg}/landcover30.fixed . +cp ${FIXorg}/gmted2010.30sec.int fort.235 # #----------------------------------------------------------------------- # @@ -221,7 +221,7 @@ POST_STEP # # Change location to the original directory. 
# -cd_vrfy - +cd - # #----------------------------------------------------------------------- # @@ -236,7 +236,7 @@ raw_orog_fn_prefix="${CRES}${DOT_OR_USCORE}raw_orog" fn_suffix_with_halo="tile${TILE_RGNL}.halo${NHW}.nc" raw_orog_fn="${raw_orog_fn_prefix}.${fn_suffix_with_halo}" raw_orog_fp="${raw_dir}/${raw_orog_fn}" -mv_vrfy "${raw_orog_fp_orig}" "${raw_orog_fp}" +mv "${raw_orog_fp_orig}" "${raw_orog_fp}" # #----------------------------------------------------------------------- # @@ -249,8 +249,8 @@ mv_vrfy "${raw_orog_fp_orig}" "${raw_orog_fp}" suites=( "FV3_RAP" "FV3_HRRR" "FV3_GFS_v15_thompson_mynn_lam3km" "FV3_GFS_v17_p8" ) if [[ ${suites[@]} =~ "${CCPP_PHYS_SUITE}" ]] ; then DATA="${DATA:-${OROG_DIR}/temp_orog_data}" - mkdir_vrfy -p ${DATA} - cd_vrfy ${DATA} + mkdir -p ${DATA} + cd ${DATA} mosaic_fn_gwd="${CRES}${DOT_OR_USCORE}mosaic.halo${NH4}.nc" mosaic_fp_gwd="${FIXlam}/${mosaic_fn_gwd}" grid_fn_gwd=$( get_charvar_from_netcdf "${mosaic_fp_gwd}" "gridfiles" ) || \ @@ -290,9 +290,9 @@ returned with nonzero exit code: exec_fp = \"${exec_fp}\"" POST_STEP - mv_vrfy "${CRES}${DOT_OR_USCORE}oro_data_ss.tile${TILE_RGNL}.halo${NH0}.nc" \ - "${CRES}${DOT_OR_USCORE}oro_data_ls.tile${TILE_RGNL}.halo${NH0}.nc" \ - "${OROG_DIR}" + mv "${CRES}${DOT_OR_USCORE}oro_data_ss.tile${TILE_RGNL}.halo${NH0}.nc" \ + "${CRES}${DOT_OR_USCORE}oro_data_ls.tile${TILE_RGNL}.halo${NH0}.nc" \ + "${OROG_DIR}" fi # @@ -390,7 +390,7 @@ fn_suffix_without_halo="tile${TILE_RGNL}.nc" filtered_orog_fn_prefix="${CRES}${DOT_OR_USCORE}filtered_orog" filtered_orog_fp_prefix="${filter_dir}/${filtered_orog_fn_prefix}" filtered_orog_fp="${filtered_orog_fp_prefix}.${fn_suffix_without_halo}" -cp_vrfy "${raw_orog_fp}" "${filtered_orog_fp}" +cp "${raw_orog_fp}" "${filtered_orog_fp}" # # The orography filtering executable looks for the grid file specified # in the grid mosaic file (more specifically, specified by the gridfiles @@ -424,7 +424,7 @@ EOF # in which it is located). Thus, since above we created the input.nml # file in filter_dir, we must also run the executable out of this directory. # -cd_vrfy "${filter_dir}" +cd "${filter_dir}" # # Run the orography filtering executable. # @@ -444,11 +444,11 @@ POST_STEP filtered_orog_fn_orig=$( basename "${filtered_orog_fp}" ) filtered_orog_fn="${filtered_orog_fn_prefix}.${fn_suffix_with_halo}" filtered_orog_fp=$( dirname "${filtered_orog_fp}" )"/${filtered_orog_fn}" -mv_vrfy "${filtered_orog_fn_orig}" "${filtered_orog_fn}" +mv "${filtered_orog_fn_orig}" "${filtered_orog_fn}" # # Change location to the original directory. # -cd_vrfy - +cd - print_info_msg "$VERBOSE" " Filtering of orography complete." @@ -485,7 +485,7 @@ unshaved_fp="${filtered_orog_fp}" # We perform the work in shave_dir, so change location to that directory. # Once it is complete, we move the resultant file from shave_dir to OROG_DIR. # -cd_vrfy "${shave_dir}" +cd "${shave_dir}" # # Create an input namelist file for the shave executable to generate an # orography file without a halo from the one with a wide halo. Then call @@ -513,7 +513,7 @@ The namelist file (nml_fn) used in this call is in directory shave_dir: nml_fn = \"${nml_fn}\" shave_dir = \"${shave_dir}\"" POST_STEP -mv_vrfy ${shaved_fp} ${OROG_DIR} +mv ${shaved_fp} ${OROG_DIR} # # Create an input namelist file for the shave executable to generate an # orography file with a 4-cell-wide halo from the one with a wide halo. 
@@ -541,11 +541,11 @@ The namelist file (nml_fn) used in this call is in directory shave_dir: nml_fn = \"${nml_fn}\" shave_dir = \"${shave_dir}\"" POST_STEP -mv_vrfy "${shaved_fp}" "${OROG_DIR}" +mv "${shaved_fp}" "${OROG_DIR}" # # Change location to the original directory. # -cd_vrfy - +cd - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index 868029a488..c4ee8f25b1 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -70,7 +70,7 @@ ulimit -s unlimited # #----------------------------------------------------------------------- # -cd_vrfy $DATA +cd $DATA # #----------------------------------------------------------------------- # @@ -162,7 +162,7 @@ case "$GTYPE" in # for fn in *.nc; do if [[ -f $fn ]]; then - mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}_${fn} + mv $fn ${SFC_CLIMO_DIR}/${CRES}_${fn} fi done ;; @@ -181,7 +181,7 @@ case "$GTYPE" in for fn in *.halo.nc; do if [ -f $fn ]; then bn="${fn%.halo.nc}" - mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH4}.nc + mv $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH4}.nc fi done # @@ -194,7 +194,7 @@ case "$GTYPE" in for fn in *.nc; do if [ -f $fn ]; then bn="${fn%.nc}" - mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH0}.nc + mv $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH0}.nc fi done ;; diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index b9f6e3ac32..f769d4e225 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -98,7 +98,7 @@ Creating links in the INPUT subdirectory of the current run directory to the grid and (filtered) orography files ..." # Create links to fix files in the FIXlam directory. -cd_vrfy ${DATA}/INPUT +cd ${DATA}/INPUT # # For experiments in which the TN_MAKE_GRID task is run, we make the @@ -219,7 +219,7 @@ of the current run directory (DATA), where DATA = \"${DATA}\" ..." -cd_vrfy ${DATA}/INPUT +cd ${DATA}/INPUT # # The symlinks to be created point to files in the same directory (INPUT), @@ -288,7 +288,7 @@ fi # #----------------------------------------------------------------------- # -cd_vrfy ${DATA} +cd ${DATA} print_info_msg "$VERBOSE" " Creating links in the current run directory (DATA) to fixed (i.e. @@ -360,8 +360,8 @@ fi # #----------------------------------------------------------------------- # -cd_vrfy ${DATA} -rm_vrfy -f time_stamp.out +cd ${DATA} +rm -f time_stamp.out # #----------------------------------------------------------------------- # @@ -398,7 +398,7 @@ create_symlink_to_file ${FIELD_TABLE_FP} ${DATA}/${FIELD_TABLE_FN} ${relative_li create_symlink_to_file ${FIELD_DICT_FP} ${DATA}/${FIELD_DICT_FN} ${relative_link_flag} if [ ${WRITE_DOPOST} = "TRUE" ]; then - cp_vrfy ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat + cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then post_config_fp="${CUSTOM_POST_CONFIG_FP}" print_info_msg " @@ -416,9 +416,9 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then post_config_fp = \"${post_config_fp}\" ====================================================================" fi - cp_vrfy ${post_config_fp} ./postxconfig-NT_FH00.txt - cp_vrfy ${post_config_fp} ./postxconfig-NT.txt - cp_vrfy ${PARMdir}/upp/params_grib2_tbl_new . + cp ${post_config_fp} ./postxconfig-NT_FH00.txt + cp ${post_config_fp} ./postxconfig-NT.txt + cp ${PARMdir}/upp/params_grib2_tbl_new . 
# Set itag for inline-post: if [ "${CPL_AQM}" = "TRUE" ]; then post_itag_add="aqf_on=.true.," @@ -445,7 +445,7 @@ fi #---------------------------------------------------------------------- # -cp_vrfy ${CCPP_PHYS_DIR}/noahmptable.tbl . +cp ${CCPP_PHYS_DIR}/noahmptable.tbl . # #----------------------------------------------------------------------- @@ -460,9 +460,9 @@ if ([ "${DO_SPP}" = "TRUE" ] || [ "${DO_SPPT}" = "TRUE" ] || [ "${DO_SHUM}" = "T STOCH="TRUE" fi if [ "${STOCH}" == "TRUE" ]; then - cp_vrfy ${FV3_NML_STOCH_FP} ${DATA}/${FV3_NML_FN} + cp ${FV3_NML_STOCH_FP} ${DATA}/${FV3_NML_FN} else - ln_vrfy -sf ${FV3_NML_FP} ${DATA}/${FV3_NML_FN} + ln -sf ${FV3_NML_FP} ${DATA}/${FV3_NML_FN} fi # @@ -508,10 +508,10 @@ fi # flag_fcst_restart="FALSE" if [ "${DO_FCST_RESTART}" = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then - cp_vrfy input.nml input.nml_orig - cp_vrfy model_configure model_configure_orig + cp input.nml input.nml_orig + cp model_configure model_configure_orig if [ "${CPL_AQM}" = "TRUE" ]; then - cp_vrfy aqm.rc aqm.rc_orig + cp aqm.rc aqm.rc_orig fi relative_link_flag="FALSE" flag_fcst_restart="TRUE" @@ -558,14 +558,14 @@ for the current cycle's (cdate) run directory (DATA) failed: done # Create soft-link of restart files in INPUT directory - cd_vrfy ${DATA}/INPUT + cd ${DATA}/INPUT for file_id in "${file_ids[@]}"; do - rm_vrfy "${file_id}" + rm "${file_id}" target="${DATA}/RESTART/${rst_yyyymmdd}.${rst_hh}0000.${file_id}" symlink="${file_id}" create_symlink_to_file $target $symlink ${relative_link_flag} done - cd_vrfy ${DATA} + cd ${DATA} fi # #----------------------------------------------------------------------- @@ -668,7 +668,7 @@ fi # if [ "${RUN_ENVIR}" = "nco" ] && [ "${CPL_AQM}" = "TRUE" ]; then # create an intermediate symlink to RESTART - ln_vrfy -sf "${DATA}/RESTART" "${COMIN}/RESTART" + ln -sf "${DATA}/RESTART" "${COMIN}/RESTART" fi # #----------------------------------------------------------------------- @@ -728,14 +728,14 @@ POST_STEP if [ "${CPL_AQM}" = "TRUE" ]; then if [ "${RUN_ENVIR}" = "nco" ]; then if [ -d "${COMIN}/RESTART" ] && [ "$(ls -A ${DATA}/RESTART)" ]; then - rm_vrfy -rf "${COMIN}/RESTART" + rm -rf "${COMIN}/RESTART" fi if [ "$(ls -A ${DATA}/RESTART)" ]; then - cp_vrfy -Rp ${DATA}/RESTART ${COMIN} + cp -Rp ${DATA}/RESTART ${COMIN} fi fi - cp_vrfy -p ${DATA}/${AQM_RC_PRODUCT_FN} ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${AQM_RC_PRODUCT_FN} + cp -p ${DATA}/${AQM_RC_PRODUCT_FN} ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${AQM_RC_PRODUCT_FN} fhr_ct=0 fhr=0 @@ -745,8 +745,8 @@ if [ "${CPL_AQM}" = "TRUE" ]; then source_phy="${DATA}/phyf${fhr_ct}.nc" target_dyn="${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr_ct}.nc" target_phy="${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr_ct}.nc" - [ -f ${source_dyn} ] && cp_vrfy -p ${source_dyn} ${target_dyn} - [ -f ${source_phy} ] && cp_vrfy -p ${source_phy} ${target_phy} + [ -f ${source_dyn} ] && cp -p ${source_dyn} ${target_dyn} + [ -f ${source_phy} ] && cp -p ${source_phy} ${target_phy} (( fhr=fhr+1 )) done fi @@ -767,9 +767,9 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then if [ "${RUN_ENVIR}" != "nco" ]; then export COMOUT="${DATA}/postprd" fi - mkdir_vrfy -p "${COMOUT}" + mkdir -p "${COMOUT}" - cd_vrfy ${COMOUT} + cd ${COMOUT} for fhr in $(seq -f "%03g" 0 ${FCST_LEN_HRS}); do @@ -796,7 +796,7 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then post_orig_fn="${FID}.${post_fn_suffix}" post_renamed_fn="${NET}.${cycle}${dot_ensmem}.${fid}.${post_renamed_fn_suffix}" - mv_vrfy ${DATA}/${post_orig_fn} ${post_renamed_fn} + mv 
${DATA}/${post_orig_fn} ${post_renamed_fn} if [ $RUN_ENVIR != "nco" ]; then basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M ) symlink_suffix="_${basetime}f${fhr}${post_mn}" @@ -809,8 +809,8 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then done if [ "${CPL_AQM}" = "TRUE" ]; then - mv_vrfy ${DATA}/dynf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.nc - mv_vrfy ${DATA}/phyf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.nc + mv ${DATA}/dynf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.nc + mv ${DATA}/phyf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.nc fi done diff --git a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh index fe0e119b19..aa24abbb10 100755 --- a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh +++ b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh @@ -271,7 +271,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh index 7eb1ce4605..93444069cb 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh @@ -272,7 +272,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh index 458dcec33f..4b9716493e 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh @@ -221,7 +221,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh index fc735845c9..918fb900d3 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh @@ -173,7 +173,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh index 92d39102fc..2528c32ced 100755 --- a/scripts/exregional_run_met_pb2nc_obs.sh +++ b/scripts/exregional_run_met_pb2nc_obs.sh @@ -140,7 +140,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh index 7eabe02901..fb495a6145 100755 --- a/scripts/exregional_run_met_pcpcombine.sh +++ b/scripts/exregional_run_met_pcpcombine.sh @@ -226,7 +226,7 @@ set_vx_fhr_list \ 
# #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index ab3377e6b4..1bf45bd965 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -80,8 +80,8 @@ fi # #----------------------------------------------------------------------- # -rm_vrfy -f fort.* -cp_vrfy ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat +rm -f fort.* +cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then post_config_fp="${CUSTOM_POST_CONFIG_FP}" print_info_msg " @@ -105,18 +105,18 @@ temporary work directory (DATA_FHR): DATA_FHR = \"${DATA_FHR}\" ====================================================================" fi -cp_vrfy ${post_config_fp} ./postxconfig-NT.txt -cp_vrfy ${PARMdir}/upp/params_grib2_tbl_new . +cp ${post_config_fp} ./postxconfig-NT.txt +cp ${PARMdir}/upp/params_grib2_tbl_new . if [ ${USE_CRTM} = "TRUE" ]; then - cp_vrfy ${CRTM_DIR}/Nalli.IRwater.EmisCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/FAST*.bin ./ - cp_vrfy ${CRTM_DIR}/NPOESS.IRland.EmisCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/NPOESS.IRsnow.EmisCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/NPOESS.IRice.EmisCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/AerosolCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/CloudCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/*.SpcCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/*.TauCoeff.bin ./ + cp ${CRTM_DIR}/Nalli.IRwater.EmisCoeff.bin ./ + cp ${CRTM_DIR}/FAST*.bin ./ + cp ${CRTM_DIR}/NPOESS.IRland.EmisCoeff.bin ./ + cp ${CRTM_DIR}/NPOESS.IRsnow.EmisCoeff.bin ./ + cp ${CRTM_DIR}/NPOESS.IRice.EmisCoeff.bin ./ + cp ${CRTM_DIR}/AerosolCoeff.bin ./ + cp ${CRTM_DIR}/CloudCoeff.bin ./ + cp ${CRTM_DIR}/*.SpcCoeff.bin ./ + cp ${CRTM_DIR}/*.TauCoeff.bin ./ print_info_msg " ==================================================================== Copying the external CRTM fix files from CRTM_DIR to the temporary @@ -270,7 +270,7 @@ post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.gri # generates (i.e. "...prslev..." and "...natlev..." files) and move, # rename, and create symlinks to them. 
# -cd_vrfy "${COMOUT}" +cd "${COMOUT}" basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M ) symlink_suffix="${dot_ensmem}.${basetime}f${fhr}${post_mn}" if [ "${CPL_AQM}" = "TRUE" ]; then @@ -282,7 +282,7 @@ for fid in "${fids[@]}"; do FID=$(echo_uppercase $fid) post_orig_fn="${FID}.${post_fn_suffix}" post_renamed_fn="${NET}.${cycle}${dot_ensmem}.${fid}.${post_renamed_fn_suffix}" - mv_vrfy ${DATA_FHR}/${post_orig_fn} ${post_renamed_fn} + mv ${DATA_FHR}/${post_orig_fn} ${post_renamed_fn} if [ $RUN_ENVIR != "nco" ]; then create_symlink_to_file ${post_renamed_fn} ${FID}${symlink_suffix} TRUE fi @@ -292,7 +292,7 @@ for fid in "${fids[@]}"; do fi done -rm_vrfy -rf ${DATA_FHR} +rm -rf ${DATA_FHR} # #----------------------------------------------------------------------- diff --git a/scripts/exregional_run_prdgen.sh b/scripts/exregional_run_prdgen.sh index 8fc72dff1c..5d1bfbf447 100755 --- a/scripts/exregional_run_prdgen.sh +++ b/scripts/exregional_run_prdgen.sh @@ -166,7 +166,7 @@ net4=$(echo ${NET:0:4} | tr '[:upper:]' '[:lower:]') for leveltype in prslev natlev ififip testbed do if [ -f ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ]; then - ln_vrfy -sf --relative ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 + ln -sf --relative ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 wgrib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 -s > ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2.idx fi done @@ -184,7 +184,7 @@ if [ ${PREDEF_GRID_NAME} = "RRFS_NA_3km" ]; then DATA=$COMOUT DATAprdgen=$DATA/prdgen_${fhr} -mkdir_vrfy $DATAprdgen +mkdir $DATAprdgen wgrib2 ${COMOUT}/${NET}.${cycle}.prslev.f${fhr}.grib2 >& $DATAprdgen/prslevf${fhr}.txt @@ -223,7 +223,7 @@ for domain in ${domains[@]} do for task in $(seq ${tasks[count]}) do - mkdir_vrfy -p $DATAprdgen/prdgen_${domain}_${task} + mkdir -p $DATAprdgen/prdgen_${domain}_${task} echo "$SCRIPTSdir/exregional_run_prdgen_subpiece.sh $fhr $cyc $task $domain ${DATAprdgen} ${COMOUT} &" >> $DATAprdgen/poescript_${fhr} done count=$count+1 @@ -269,7 +269,7 @@ else # if [ ${#ADDNL_OUTPUT_GRIDS[@]} -gt 0 ]; then - cd_vrfy ${COMOUT} + cd ${COMOUT} grid_specs_130="lambert:265:25.000000 233.862000:451:13545.000000 16.281000:337:13545.000000" grid_specs_200="lambert:253:50.000000 285.720000:108:16232.000000 16.201000:94:16232.000000" @@ -289,7 +289,7 @@ if [ ${#ADDNL_OUTPUT_GRIDS[@]} -gt 0 ]; then eval grid_specs=\$grid_specs_${grid} subdir=${COMOUT}/${grid}_grid - mkdir_vrfy -p ${subdir}/${fhr} + mkdir -p ${subdir}/${fhr} bg_remap=${subdir}/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 # Interpolate fields to new grid @@ -317,11 +317,11 @@ if [ ${#ADDNL_OUTPUT_GRIDS[@]} -gt 0 ]; then rm -f ${subdir}/${fhr}/tmp_${grid}.grib2 # Save to com directory - mkdir_vrfy -p ${COMOUT}/${grid}_grid - cp_vrfy ${bg_remap} ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 + mkdir -p ${COMOUT}/${grid}_grid + cp ${bg_remap} ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 if [[ -f ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ]]; then - ln_vrfy -fs --relative 
${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 + ln -fs --relative ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 wgrib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 -s > ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2.idx fi @@ -331,7 +331,7 @@ fi fi # block for parallel or series wgrib2 runs. -rm_vrfy -rf ${DATA_FHR} +rm -rf ${DATA_FHR} # #----------------------------------------------------------------------- # diff --git a/ush/bash_utils/check_for_preexist_dir_file.sh b/ush/bash_utils/check_for_preexist_dir_file.sh index 4ca55766d2..2843222230 100644 --- a/ush/bash_utils/check_for_preexist_dir_file.sh +++ b/ush/bash_utils/check_for_preexist_dir_file.sh @@ -107,7 +107,7 @@ where the arguments are defined as follows: # "delete") - rm_vrfy -rf "${dir_or_file}" + rm -rf "${dir_or_file}" ;; # #----------------------------------------------------------------------- @@ -134,7 +134,7 @@ Specified directory or file (dir_or_file) already exists: Moving (renaming) preexisting directory or file to: old_dir_or_file = \"${old_dir_or_file}\"" - mv_vrfy "${dir_or_file}" "${old_dir_or_file}" + mv "${dir_or_file}" "${old_dir_or_file}" ;; # #----------------------------------------------------------------------- diff --git a/ush/bash_utils/create_symlink_to_file.sh b/ush/bash_utils/create_symlink_to_file.sh index dd25cfa2fd..c6a5213326 100644 --- a/ush/bash_utils/create_symlink_to_file.sh +++ b/ush/bash_utils/create_symlink_to_file.sh @@ -80,12 +80,6 @@ not exist or is not a file: # # Create the symlink. # -# Important note: -# In the ln_vrfy command below, do not quote ${relative_flag} because if -# is quoted (either single or double quotes) but happens to be a null -# string, it will be treated as the (empty) name of (or path to) the -# target and will cause an error. -# #----------------------------------------------------------------------- # ln -sf ${relative_flag} "$target" "$symlink" diff --git a/ush/bash_utils/filesys_cmds_vrfy.sh b/ush/bash_utils/filesys_cmds_vrfy.sh deleted file mode 100644 index b355d293ad..0000000000 --- a/ush/bash_utils/filesys_cmds_vrfy.sh +++ /dev/null @@ -1,280 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This is a generic function that executes the specified command (e.g. -# "cp", "mv", etc) with the specified options/arguments and then verifies -# that the command executed without errors. The first argument to this -# function is the command to execute while the remaining ones are the -# options/arguments to be passed to that command. -# -#----------------------------------------------------------------------- -# -function filesys_cmd_vrfy() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). 
-# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Get information about the script or function that calls this function. -# Note that caller_name will be set as follows: -# -# 1) If the caller is a function, caller_name will be set to the name of -# that function. -# 2) If the caller is a sourced script, caller_name will be set to -# "script". Note that a sourced script cannot be the top level -# script since by defintion, it is sourced by another script or -# function. -# 3) If the caller is the top-level script, caller_name will be set to -# "main". -# -# Thus, if caller_name is set to "script" or "main", the caller is a -# script, and if it is set to anything else, the caller is a function. -# -# Below, the index into FUNCNAME and BASH_SOURCE is 2 (not 1 as is usually -# the case) because this function is called by functions such as cp_vrfy, -# mv_vrfy, rm_vrfy, ln_vrfy, mkdir_vrfy, and cd_vrfy, but these are just -# wrappers, and in the error and informational messages, we are really -# interested in the scripts/functions that in turn call these wrappers. -# -#----------------------------------------------------------------------- -# - local caller_name="main" - local caller_fp="" - if [ -z "${BASH_SOURCE[2]-x}" ]; then - caller_fp=$( $READLINK -f "${BASH_SOURCE[2]}" ) - local caller_fn=$( basename "${caller_fp}" ) - local caller_dir=$( dirname "${caller_fp}" ) - caller_name="${FUNCNAME[2]}" - fi -# -#----------------------------------------------------------------------- -# -# Declare local variables that are used later below. -# -#----------------------------------------------------------------------- -# - local cmd \ - output \ - exit_code \ - double_space \ - script_or_function -# -#----------------------------------------------------------------------- -# -# Check that at least one argument is supplied. -# -#----------------------------------------------------------------------- -# - if [ "$#" -lt 1 ]; then - - print_err_msg_exit " -Incorrect number of arguments specified: - - Function name: \"${func_name}\" - Number of arguments specified: $# - -Usage: - - ${func_name} cmd [args_to_cmd] - -where \"cmd\" is the name of the command to execute and \"args_to_cmd\" -are zero or more options and arguments to pass to that command. -" - - fi -# -#----------------------------------------------------------------------- -# -# The first argument to this function is the command to execute while -# the remaining ones are the arguments to that command. Extract the -# command and save it in the variable "cmd". Then shift the argument -# list so that $@ contains the arguments to the command but not the -# name of the command itself. -# -#----------------------------------------------------------------------- -# - cmd="$1" - shift -# -#----------------------------------------------------------------------- -# -# Pass the arguments to the command and execute it, saving the outputs -# to stdout and stderr in the variable "output". Also, save the exit -# code from the execution. 
-# -#----------------------------------------------------------------------- -# - local output=$( "$cmd" "$@" 2>&1 ) - local exit_code=$? -# -#----------------------------------------------------------------------- -# -# If output is not empty, it will be printed to stdout below either as -# an error message or an informational message. In either case, format -# it by adding a double space to the beginning of each line. -# -#----------------------------------------------------------------------- -# - if [ -n "$output" ]; then - local double_space=" " - output="${double_space}${output}" - output=${output/$'\n'/$'\n'${double_space}} - fi -# -#----------------------------------------------------------------------- -# -# If the exit code from the execution of cmd above is nonzero, print out -# an error message and exit. -# -#----------------------------------------------------------------------- -# - if [ "${caller_name}" = "main" ] || \ - [ "${caller_name}" = "script" ]; then - local script_or_function="the script" - else - local script_or_function="function \"${caller_name}\"" - fi - - if [ ${exit_code} -ne 0 ]; then - - print_err_msg_exit "\ -Call to function \"${cmd}_vrfy\" failed. This function was called from -${script_or_function} in file: - - \"${caller_fp}\" - -Error message from \"${cmd}_vrfy\" function's \"$cmd\" operation: -$output" - - fi -# -#----------------------------------------------------------------------- -# -# If the exit code from the execution of cmd above is zero, continue. -# -# First, check if cmd is set to "cd". If so, the execution of cmd above -# in a separate subshell [which is what happens when using the $("$cmd") -# construct above] will change directory in that subshell but not in the -# current shell. Thus, rerun the "cd" command in the current shell. -# -#----------------------------------------------------------------------- -# - if [ "$cmd" = "cd" ]; then - "$cmd" "$@" 2>&1 > /dev/null - fi -# -#----------------------------------------------------------------------- -# -# If output is not empty, print out whatever message it contains (e.g. -# it might contain a warning or other informational message). -# -#----------------------------------------------------------------------- -# - if [ -n "$output" ]; then - - print_info_msg " -\"${cmd}_vrfy\" operation returned with a message. This command was -issued from ${script_or_function} in file: - - \"${caller_fp}\" - -Message from \"${cmd}_vrfy\" function's \"$cmd\" operation: -$output" - - fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} - - -# -#----------------------------------------------------------------------- -# -# The following are functions are counterparts of common filesystem -# commands "with verification", i.e. they execute a filesystem command -# (such as "cp" and "mv") and then verify that the execution was successful. -# -# These functions are called using the "filesys_cmd_vrfy" function defined -# above. In each of these functions, we: -# -# 1) Save current shell options (in a global array) and then set new -# options for this script/function. -# 2) Call the generic function "filesys_cmd_vrfy" with the command of -# interest (e.g. "cp") as the first argument and the arguments passed -# in as the rest. 
-# 3) Restore the shell options saved at the beginning of the function. -# -#----------------------------------------------------------------------- -# - -function cp_vrfy() { - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "cp" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - -function mv_vrfy() { - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "mv" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - -function rm_vrfy() { - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "rm" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - -function ln_vrfy() { - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "$LN_UTIL" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - -function mkdir_vrfy() { - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "mkdir" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - -function cd_vrfy() { - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "cd" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - diff --git a/ush/get_mrms_files.sh b/ush/get_mrms_files.sh index b669094488..65a99cc1bd 100644 --- a/ush/get_mrms_files.sh +++ b/ush/get_mrms_files.sh @@ -54,7 +54,7 @@ function get_mrms_files () { # 10 represents a significant number of vertical levels of data if [ ${numgrib2} -ge 10 ] && [ ! -e filelist_mrms ]; then - cp_vrfy ${nsslfile1} ${output_path} + cp ${nsslfile1} ${output_path} ls ${output_path}/${file_matches} > ${output_path}/filelist_mrms echo "Copying mrms files for ${YYYY}${MM}${DD}-${cyc}${min}" fi diff --git a/ush/job_preamble.sh b/ush/job_preamble.sh index e9c3683c40..16b99393a2 100644 --- a/ush/job_preamble.sh +++ b/ush/job_preamble.sh @@ -88,7 +88,7 @@ fi export DATA= if [ "${RUN_ENVIR}" = "nco" ]; then export DATA=${DATAROOT}/${jobid} - mkdir_vrfy -p $DATA + mkdir -p $DATA cd $DATA fi # @@ -174,10 +174,10 @@ export -f POST_STEP # if [ "${RUN_ENVIR}" = "nco" ] && [ "${WORKFLOW_MANAGER}" != "ecflow" ]; then __EXPTLOG=${EXPTDIR}/log - mkdir_vrfy -p ${__EXPTLOG} + mkdir -p ${__EXPTLOG} for i in ${LOGDIR}/*.${WORKFLOW_ID}.log; do __LOGB=$(basename $i .${WORKFLOW_ID}.log) - ln_vrfy -sf $i ${__EXPTLOG}/${__LOGB}.log + ln -sf $i ${__EXPTLOG}/${__LOGB}.log done fi # diff --git a/ush/launch_FV3LAM_wflow.sh b/ush/launch_FV3LAM_wflow.sh index cfbedac9cf..92dd24aee6 100755 --- a/ush/launch_FV3LAM_wflow.sh +++ b/ush/launch_FV3LAM_wflow.sh @@ -166,7 +166,7 @@ wflow_status="IN PROGRESS" # #----------------------------------------------------------------------- # -cd_vrfy "$exptdir" +cd "$exptdir" # #----------------------------------------------------------------------- # diff --git a/ush/source_util_funcs.sh b/ush/source_util_funcs.sh index fa097de34d..7fe3025d6a 100644 --- a/ush/source_util_funcs.sh +++ b/ush/source_util_funcs.sh @@ -115,16 +115,6 @@ function source_util_funcs() { # #----------------------------------------------------------------------- # -# Source the file containing functions that execute filesystem commands -# (e.g. "cp", "mv") with verification (i.e. verifying that the commands -# completed successfully). -# -#----------------------------------------------------------------------- -# - . ${bashutils_dir}/filesys_cmds_vrfy.sh -# -#----------------------------------------------------------------------- -# # Source the file containing the function that searches an array for a # specified string. 
#

From 2d94ed42357d720765fd4d45b6bc88682957968e Mon Sep 17 00:00:00 2001
From: gsketefian <31046882+gsketefian@users.noreply.github.com>
Date: Wed, 1 May 2024 14:07:11 -0600
Subject: [PATCH 22/42] [develop] Streamline SRW App's interface to MET/METplus (#1005)

This PR streamlines the SRW App's interface to the MET/METplus
verification tool and includes several bug fixes. Details:

* Replace the field-specific METplus configuration jinja2 templates
  associated with each METplus tool (these templates are hard-coded for
  each field) with a single template that contains jinja2 code to handle
  any valid field to be verified.
* Add YAML configuration files for verification that specify the fields
  to verify (including field levels and thresholds). This consolidates
  the field/level/threshold information in one place instead of having
  it spread out and repeated in several hard-coded configuration files.
* Add a Python script (decouple_fcst_obs_vx_config.py) to parse these
  two vx configuration files and create a dictionary of the
  field/level/threshold information that can then be passed to the
  unified workflow templating tool.
* Modify the ex-scripts for the verification tasks
  (exregional_run_met_....sh) to allow the use of the new jinja2 METplus
  config templates. This includes adding code to call the new script
  decouple_fcst_obs_vx_config.py and then passing its output to the
  unified workflow templating tool to generate METplus configuration
  files from the (new) jinja2 templates.
* Add new environment variables to the rocoto workflow configuration
  files (verify_[pre|det|ens].yaml) that are needed for using the new
  jinja2 METplus config templates.
* Bug fixes
---
 jobs/JREGIONAL_CHECK_POST_OUTPUT | 15 +-
 jobs/JREGIONAL_PARSE_VX_CONFIG | 97 ++
 jobs/JREGIONAL_RUN_MET_PB2NC_OBS | 4 +-
 parm/metplus/EnsembleStat.conf | 759 ++++++++++++++
 parm/metplus/EnsembleStat_ADPSFC.conf | 307 ------
 parm/metplus/EnsembleStat_ADPUPA.conf | 351 -------
 parm/metplus/EnsembleStat_APCP.conf | 258 -----
 parm/metplus/EnsembleStat_ASNOW.conf | 259 -----
 parm/metplus/EnsembleStat_REFC.conf | 265 -----
 parm/metplus/EnsembleStat_RETOP.conf | 267 -----
 parm/metplus/GenEnsProd.conf | 390 ++++++++
 parm/metplus/GenEnsProd_ADPSFC.conf | 219 ----
 parm/metplus/GenEnsProd_ADPUPA.conf | 236 -----
 parm/metplus/GenEnsProd_APCP.conf | 191 ----
 parm/metplus/GenEnsProd_ASNOW.conf | 192 ----
 parm/metplus/GenEnsProd_REFC.conf | 191 ----
 parm/metplus/GenEnsProd_RETOP.conf | 192 ----
 parm/metplus/GridStat_APCP.conf | 309 ------
 parm/metplus/GridStat_ASNOW.conf | 283 ------
 parm/metplus/GridStat_REFC.conf | 315 ------
 parm/metplus/GridStat_RETOP.conf | 317 ------
 parm/metplus/GridStat_ensmean.conf | 662 ++++++++++
 parm/metplus/GridStat_ensmean_APCP.conf | 282 ------
 parm/metplus/GridStat_ensmean_ASNOW.conf | 287 ------
 parm/metplus/GridStat_ensmean_REFC.conf | 313 ------
 parm/metplus/GridStat_ensmean_RETOP.conf | 315 ------
 parm/metplus/GridStat_ensprob.conf | 675 +++++++++++
 parm/metplus/GridStat_ensprob_APCP.conf | 362 -------
 parm/metplus/GridStat_ensprob_ASNOW.conf | 384 -------
 parm/metplus/GridStat_ensprob_REFC.conf | 382 -------
 parm/metplus/GridStat_ensprob_RETOP.conf | 390 --------
 parm/metplus/GridStat_or_PointStat.conf | 940 ++++++++++++++++++
 parm/metplus/PcpCombine.conf | 216 ++++
 parm/metplus/PcpCombine_fcst_APCP.conf | 130 ---
 parm/metplus/PcpCombine_fcst_ASNOW.conf | 141 ---
 parm/metplus/PcpCombine_obs_APCP.conf | 139 ---
 parm/metplus/PointStat_ADPSFC.conf | 378 -------
 parm/metplus/PointStat_ADPUPA.conf | 343 -------
parm/metplus/PointStat_ensmean.conf | 566 +++++++++++ parm/metplus/PointStat_ensmean_ADPSFC.conf | 252 ----- parm/metplus/PointStat_ensmean_ADPUPA.conf | 319 ------ parm/metplus/PointStat_ensprob.conf | 524 ++++++++++ parm/metplus/PointStat_ensprob_ADPSFC.conf | 415 -------- parm/metplus/PointStat_ensprob_ADPUPA.conf | 523 ---------- parm/metplus/metplus_macros.jinja | 150 +++ parm/metplus/vx_config_det.yaml | 204 ++++ parm/metplus/vx_config_ens.yaml | 54 + parm/wflow/verify_det.yaml | 38 +- parm/wflow/verify_ens.yaml | 64 +- parm/wflow/verify_pre.yaml | 8 +- scripts/exregional_check_post_output.sh | 9 +- scripts/exregional_parse_vx_config.sh | 94 ++ ...onal_run_met_genensprod_or_ensemblestat.sh | 101 +- ...gional_run_met_gridstat_or_pointstat_vx.sh | 90 +- ...un_met_gridstat_or_pointstat_vx_ensmean.sh | 90 +- ...un_met_gridstat_or_pointstat_vx_ensprob.sh | 44 +- scripts/exregional_run_met_pb2nc_obs.sh | 10 +- scripts/exregional_run_met_pcpcombine.sh | 70 +- ush/metplus/decouple_fcst_obs_vx_config.py | 436 ++++++++ ush/set_vx_fhr_list.sh | 2 +- 60 files changed, 6040 insertions(+), 9779 deletions(-) create mode 100755 jobs/JREGIONAL_PARSE_VX_CONFIG create mode 100644 parm/metplus/EnsembleStat.conf delete mode 100644 parm/metplus/EnsembleStat_ADPSFC.conf delete mode 100644 parm/metplus/EnsembleStat_ADPUPA.conf delete mode 100644 parm/metplus/EnsembleStat_APCP.conf delete mode 100644 parm/metplus/EnsembleStat_ASNOW.conf delete mode 100644 parm/metplus/EnsembleStat_REFC.conf delete mode 100644 parm/metplus/EnsembleStat_RETOP.conf create mode 100644 parm/metplus/GenEnsProd.conf delete mode 100644 parm/metplus/GenEnsProd_ADPSFC.conf delete mode 100644 parm/metplus/GenEnsProd_ADPUPA.conf delete mode 100644 parm/metplus/GenEnsProd_APCP.conf delete mode 100644 parm/metplus/GenEnsProd_ASNOW.conf delete mode 100644 parm/metplus/GenEnsProd_REFC.conf delete mode 100644 parm/metplus/GenEnsProd_RETOP.conf delete mode 100644 parm/metplus/GridStat_APCP.conf delete mode 100644 parm/metplus/GridStat_ASNOW.conf delete mode 100644 parm/metplus/GridStat_REFC.conf delete mode 100644 parm/metplus/GridStat_RETOP.conf create mode 100644 parm/metplus/GridStat_ensmean.conf delete mode 100644 parm/metplus/GridStat_ensmean_APCP.conf delete mode 100644 parm/metplus/GridStat_ensmean_ASNOW.conf delete mode 100644 parm/metplus/GridStat_ensmean_REFC.conf delete mode 100644 parm/metplus/GridStat_ensmean_RETOP.conf create mode 100644 parm/metplus/GridStat_ensprob.conf delete mode 100644 parm/metplus/GridStat_ensprob_APCP.conf delete mode 100644 parm/metplus/GridStat_ensprob_ASNOW.conf delete mode 100644 parm/metplus/GridStat_ensprob_REFC.conf delete mode 100644 parm/metplus/GridStat_ensprob_RETOP.conf create mode 100644 parm/metplus/GridStat_or_PointStat.conf create mode 100644 parm/metplus/PcpCombine.conf delete mode 100644 parm/metplus/PcpCombine_fcst_APCP.conf delete mode 100644 parm/metplus/PcpCombine_fcst_ASNOW.conf delete mode 100644 parm/metplus/PcpCombine_obs_APCP.conf delete mode 100644 parm/metplus/PointStat_ADPSFC.conf delete mode 100644 parm/metplus/PointStat_ADPUPA.conf create mode 100644 parm/metplus/PointStat_ensmean.conf delete mode 100644 parm/metplus/PointStat_ensmean_ADPSFC.conf delete mode 100644 parm/metplus/PointStat_ensmean_ADPUPA.conf create mode 100644 parm/metplus/PointStat_ensprob.conf delete mode 100644 parm/metplus/PointStat_ensprob_ADPSFC.conf delete mode 100644 parm/metplus/PointStat_ensprob_ADPUPA.conf create mode 100644 parm/metplus/metplus_macros.jinja create mode 100644 
parm/metplus/vx_config_det.yaml create mode 100644 parm/metplus/vx_config_ens.yaml create mode 100755 scripts/exregional_parse_vx_config.sh create mode 100755 ush/metplus/decouple_fcst_obs_vx_config.py diff --git a/jobs/JREGIONAL_CHECK_POST_OUTPUT b/jobs/JREGIONAL_CHECK_POST_OUTPUT index a6403ebe1f..f55f730cf4 100755 --- a/jobs/JREGIONAL_CHECK_POST_OUTPUT +++ b/jobs/JREGIONAL_CHECK_POST_OUTPUT @@ -51,10 +51,11 @@ print_info_msg " Entering script: \"${scrfunc_fn}\" In directory: \"${scrfunc_dir}\" -This is the J-job script for the task that checks that all the post- -processed output files in fact exist and are at least a certain age. -These files may have been generated by UPP as part of the current SRW -App workflow, or they may be user-staged. +This is the J-job for the task that checks that no more than +NUM_MISSING_FCST_FILES_MAX of each forecast's (ensemble member's) post- +processed output files are missing. Note that such files may have been +generated by UPP as part of the current SRW App workflow, or they may be +user-staged. ========================================================================" # #----------------------------------------------------------------------- @@ -70,9 +71,9 @@ Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." # #----------------------------------------------------------------------- # -# Create a flag file to make rocoto aware that the make_grid task has -# successfully completed (so that other tasks that depend on it can be -# launched). +# Create a flag file to make rocoto aware that the check_post_output task +# has successfully completed (so that other tasks that depend on it can +# be launched). # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_PARSE_VX_CONFIG b/jobs/JREGIONAL_PARSE_VX_CONFIG new file mode 100755 index 0000000000..c1cbba8e34 --- /dev/null +++ b/jobs/JREGIONAL_PARSE_VX_CONFIG @@ -0,0 +1,97 @@ +#!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# + +# +#----------------------------------------------------------------------- +# +# Source the variable definitions file and the bash utility functions. +# +#----------------------------------------------------------------------- +# +. $USHdir/source_util_funcs.sh +source_config_for_task "task_parse_vx_config" ${GLOBAL_VAR_DEFNS_FP} +. $USHdir/job_preamble.sh +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# +{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. 
+# +#----------------------------------------------------------------------- +# +print_info_msg " +======================================================================== +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + +This is the J-job script for the task that reads in the \"coupled\" yaml +verification (vx) configuration file (python dictionary) and generates +from it two \"decoupled\" vx configuration dictionaries, one for forecasts +and another for observations. The task then writes these two decoupled +dictionaries to a new configuration file in the experiment directory +that can be read by downstream vx tasks. + +Note: +The \"coupled\" vx configuration file contains items (dictionary keys and +values representing field names, levels, and thresholds) that consist of +both the forecast and the observation value for that item separated by a +delimiter string. Thus, they first need to be separated (decoupled) into +a value for forecasts and one for the observations before they can be +further processed. +========================================================================" +# +#----------------------------------------------------------------------- +# +# Call the ex-script for this J-job and pass to it the necessary varia- +# bles. +# +#----------------------------------------------------------------------- +# +$SCRIPTSdir/exregional_parse_vx_config.sh || \ +print_err_msg_exit "\ +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." +# +#----------------------------------------------------------------------- +# +# Run job postamble. +# +#----------------------------------------------------------------------- +# +job_postamble +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# +{ restore_shell_opts; } > /dev/null 2>&1 + diff --git a/jobs/JREGIONAL_RUN_MET_PB2NC_OBS b/jobs/JREGIONAL_RUN_MET_PB2NC_OBS index e36e72418f..2767ae1146 100755 --- a/jobs/JREGIONAL_RUN_MET_PB2NC_OBS +++ b/jobs/JREGIONAL_RUN_MET_PB2NC_OBS @@ -51,8 +51,8 @@ print_info_msg " Entering script: \"${scrfunc_fn}\" In directory: \"${scrfunc_dir}\" -This is the J-job script for the task that runs METplus for point-stat -by initialization time for all forecast hours. +This is the J-job script for the task that runs METplus for pb2nc on +NDAS observations. ========================================================================" # #----------------------------------------------------------------------- diff --git a/parm/metplus/EnsembleStat.conf b/parm/metplus/EnsembleStat.conf new file mode 100644 index 0000000000..1ca46b961e --- /dev/null +++ b/parm/metplus/EnsembleStat.conf @@ -0,0 +1,759 @@ +# {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. 
+# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{fhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. +# +{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped +# +# Name to identify model (forecast) data in output. +# +MODEL = {{vx_fcst_model_name}} + +{{METPLUS_TOOL_NAME}}_DESC = NA +# +# Name to identify observation data in output. +# +OBTYPE = {{obtype}} +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. +# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Observation data time window(s). +# +{%- if input_field_group in ['APCP', 'ASNOW'] %} +OBS_FILE_WINDOW_BEGIN = 0 +OBS_FILE_WINDOW_END = 0 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = 0 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = 0 +{%- elif input_field_group in ['REFC', 'RETOP'] %} +OBS_FILE_WINDOW_BEGIN = -300 +OBS_FILE_WINDOW_END = 300 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = 0 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = 0 +{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %} +OBS_WINDOW_BEGIN = -1799 +OBS_WINDOW_END = 1800 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = {OBS_WINDOW_END} +{%- endif %} + +# number of expected members for ensemble. Should correspond with the +# number of items in the list for FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE +{{METPLUS_TOOL_NAME}}_N_MEMBERS = {{num_ens_members}} + +# ens.ens_thresh value in the MET config file +# threshold for ratio of valid files to expected files to allow app to run +{{METPLUS_TOOL_NAME}}_ENS_THRESH = 0.05 + +# ens.vld_thresh value in the MET config file +{{METPLUS_TOOL_NAME}}_ENS_VLD_THRESH = 1.0 + +{%- if input_field_group in ['ADPSFC', 'ADPUPA'] %} + +{{METPLUS_TOOL_NAME}}_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA +#{{METPLUS_TOOL_NAME}}_OBS_QUALITY_EXC = +{%- endif %} + +# {{METPLUS_TOOL_NAME}}_MET_OBS_ERR_TABLE is not required. +# If the variable is not defined, or the value is not set, then the MET +# default is used. 
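+# The jinja branch below enables the obs error table packaged under
+# MET_BASE for the accumulated field groups (APCP, ASNOW) and the point
+# field groups (ADPSFC, ADPUPA), and leaves the value empty (i.e. the
+# MET default) for the gridded radar field groups (REFC, RETOP).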
+{%- if input_field_group in ['APCP', 'ASNOW'] %} +{{METPLUS_TOOL_NAME}}_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt +{%- elif input_field_group in ['REFC', 'RETOP'] %} +{{METPLUS_TOOL_NAME}}_MET_OBS_ERR_TABLE = +{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %} +{{METPLUS_TOOL_NAME}}_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt +{%- endif %} + + +# Used in the MET config file for: regrid to_grid field +{%- set comment_or_null = '' %} +{%- set regrid_to_grid = '' %} +{%- set regrid_method = '' %} +{%- if input_field_group in ['APCP', 'ASNOW'] %} + {%- set comment_or_null = '' %} + {%- set regrid_to_grid = 'FCST' %} + {%- set regrid_method = 'BUDGET' %} +{%- elif input_field_group in ['REFC', 'RETOP'] %} + {%- set comment_or_null = '' %} + {%- set regrid_to_grid = 'FCST' %} + {%- set regrid_method = 'BUDGET' %} +{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %} + {%- set comment_or_null = '#' %} + {%- set regrid_to_grid = 'NONE' %} + {%- set regrid_method = 'BILIN' %} +{%- endif %} +{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = {{regrid_to_grid}} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_REGRID_METHOD = {{regrid_method}} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2 +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5 +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE + +{{METPLUS_TOOL_NAME}}_CENSOR_THRESH = +{{METPLUS_TOOL_NAME}}_CENSOR_VAL = +{% if input_field_group in ['APCP', 'ASNOW'] %} +{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = +{{METPLUS_TOOL_NAME}}_DUPLICATE_FLAG = UNIQUE +{{METPLUS_TOOL_NAME}}_SKIP_CONST = TRUE +{{METPLUS_TOOL_NAME}}_OBS_ERROR_FLAG = TRUE +{%- elif input_field_group in ['REFC', 'RETOP'] %} +# Should this parameter be set to something other than ADPSFC (maybe +# just leave empty) since we are not verifying surface fields? 
+{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = ADPSFC +{{METPLUS_TOOL_NAME}}_DUPLICATE_FLAG = NONE +{{METPLUS_TOOL_NAME}}_SKIP_CONST = TRUE +{{METPLUS_TOOL_NAME}}_OBS_ERROR_FLAG = FALSE +{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %} +{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = {{input_field_group}} +{{METPLUS_TOOL_NAME}}_DUPLICATE_FLAG = NONE +{{METPLUS_TOOL_NAME}}_SKIP_CONST = FALSE +{{METPLUS_TOOL_NAME}}_OBS_ERROR_FLAG = FALSE +{%- endif %} + +{{METPLUS_TOOL_NAME}}_ENS_SSVAR_BIN_SIZE = 1.0 +{{METPLUS_TOOL_NAME}}_ENS_PHIST_BIN_SIZE = 0.05 + +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_DAY_INTERVAL = 31 +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_HOUR_INTERVAL = 6 + +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_DAY_INTERVAL = 31 +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_HOUR_INTERVAL = 6 + +{% set comment_or_null = '' %} +{%- if input_field_group in ['APCP', 'ASNOW'] %} + {%- set comment_or_null = '' %} +{%- elif input_field_group in ['REFC', 'RETOP'] %} + {%- set comment_or_null = '' %} +{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %} + {%- set comment_or_null = '#' %} +{%- endif %} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = False +{% if input_field_group in ['APCP', 'ASNOW'] %} +{{METPLUS_TOOL_NAME}}_MASK_GRID = +{%- elif input_field_group in ['REFC', 'RETOP'] %} +{{METPLUS_TOOL_NAME}}_MASK_GRID = FULL +{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %} +{{METPLUS_TOOL_NAME}}_MASK_GRID = +{%- endif %} + +{{METPLUS_TOOL_NAME}}_CI_ALPHA = 0.05 + +{{METPLUS_TOOL_NAME}}_INTERP_FIELD = BOTH +{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0 +{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE +{{METPLUS_TOOL_NAME}}_INTERP_METHOD = NEAREST +{{METPLUS_TOOL_NAME}}_INTERP_WIDTH = 1 + +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECNT = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RPS = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RHIST = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PHIST = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ORANK = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SSVAR = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RELP = STAT + +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_RANK = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_WEIGHT = FALSE +# +# Forecast and observation variables and levels as specified in the fcst +# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, +# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, +# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. +# +{#- +Import the file containing jinja macros. 
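+The macros referenced in this template (print_err_and_quit,
+get_accumulation_no_zero_pad, check_level, and check_thresh) are defined
+in that file.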
+#} +{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %} + +{#- +Jinja requires certain variables to be defined globally within the template +before they can be used in if-statements and other scopes (see Jinja +scoping rules). Define such variables. +#} +{%- set level_fcst = '' %} +{%- set level_obs = '' %} +{%- set indx_level_fcst = '' %} + +{%- set valid_threshes_fcst = [] %} +{%- set valid_threshes_obs = [] %} +{%- set threshes_fcst = [] %} +{%- set threshes_obs = [] %} +{%- set indx_input_thresh_fcst = '' %} + +{%- set opts_indent = '' %} +{%- set opts_indent_len = '' %} +{%- set tmp = '' %} +{%- set error_msg = '' %} +{#- +Make sure that the set of field groups for forecasts and observations +are identical. +#} +{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %} +{%- set fgs_obs = vx_config_dict['obs'].keys()|list %} +{%- if (fgs_fcst != fgs_obs) %} + {%- set error_msg = '\n' ~ +'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~ +'to that for observations (fgs_obs) but isn\'t:\n' ~ +' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~ +' fgs_obs = ' ~ fgs_obs %} + {{metplus_macros.print_err_and_quit(error_msg)}} +{%- endif %} + +{#- +Extract the lists of forecast and observation dictionaries containing +the valid fields, levels, and thresholds corresponding to the specified +field group (input_field_group). Note that it would be simpler to have +these be just dictionaries in which the keys are the field names (instead +of them being LISTS of dictionaries in which each dictionary contains a +single key that is the field name), but that approach cannot be used here +because it is possible for field names to be repeated (for both forecasts +and observations). For example, in the observations, the field name +'PRWE' appears more than once, each time with a different threshold, and +the combination of name and threshold is what constitutes a unique field, +not just the name by itself. +#} +{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %} +{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %} + +{#- +Reset the specified forecast level so that if it happens to be an +accumulation (e.g. 'A03'), the leading zeros in front of the hour are +stipped out (e.g. reset to 'A3'). +#} +{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %} + +{#- +Ensure that the specified input forecast level(s) (input_level_fcst) and +threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the +set(s) of valid forecast levels and thresholds, respectively, specified +in fields_levels_threshes_fcst. +#} +{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }} +{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }} + +{#- +For convenience, create lists of valid forecast and observation field +names. 
+#} +{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %} +{%- set valid_fields_fcst = [] %} +{%- for i in range(0,num_valid_fields_fcst) %} + {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %} + {%- set tmp = valid_fields_fcst.append(field) %} +{%- endfor %} + +{%- set valid_fields_obs = [] %} +{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %} +{%- for i in range(0,num_valid_fields_obs) %} + {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %} + {%- set tmp = valid_fields_obs.append(field) %} +{%- endfor %} + +{#- +Ensure that the number of valid fields for forecasts is equal to that +for the observations. +#} +{%- set num_valid_fields = 0 %} +{%- if (num_valid_fields_fcst != num_valid_fields_obs) %} + {%- set error_msg = '\n' ~ +'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~ +'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~ +'but isn\'t:\n' ~ +' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~ +' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~ +'The lists of valid forecast and observation fields are:\n' ~ +' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~ +' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} +{%- else %} + {%- set num_valid_fields = num_valid_fields_fcst %} +{%- endif %} + +{#- +Loop over the valid fields and set field names, levels, thresholds, and/ +or options for each field, both for forecasts and for obseratiions, in +the METplus configuration file. +#} +{%- set ns = namespace(var_count = 0) %} +{%- for i in range(0,num_valid_fields) %} + + {%- set field_fcst = valid_fields_fcst[i] %} + {%- set field_obs = valid_fields_obs[i] %} + +{#- +For convenience, create lists of valid forecast and observation levels +for the current field. Then check that the number of valid levels for +forecasts is the same as that for observations. +#} + {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %} + {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %} + +{#- +Extract dictionary of valid forecast levels (the dictionary keys) and +corresponding lists of valid thresholds (the values) for each level. +Then loop over these levels and corresponding lists of thresholds to set +both the forecast and observation field names, levels, thresholds, and/or +options. +#} + {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %} + {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %} + + {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} +{#- +Increment the METplus variable counter. +#} + {%- set ns.var_count = ns.var_count+1 %} + +{#- +Set forecast field name. Note that this has to exactly match the name +of the field in the input forecast file. + +For accumulated fields, the input forecast file is generated by MET's +PcpCombine tool. In that file, the field name consists of the forecast +field name here (field_fcst) with the accumulation period appended to +it (separated by an underscore), so we must do the same here to get an +exact match. +#} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}} + {%- else %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}} + {%- endif %} + +{#- +Set forecast field level. +#} +FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}} + +{#- +Set forecast field threshold(s). 
Note that no forecast thresholds are +included in the METplus configuration file if input_thresh_fcst is set +to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_thresh_fcst is set to 'all', set the list of forecast thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_fcst = valid_threshes_fcst %} +{#- +If input_thresh_fcst is set to a specific value: + 1) Ensure that input_thresh_fcst exists in the list of valid forecast + thresholds. + 2) Get the index of input_thresh_fcst in the list of valid forecast + thresholds. This will be needed later below when setting the + observation threshold(s). + 3) Use this index to set the forecast threshold to a one-element list + containing the specified forecast threshold. +#} + {%- else %} + + {%- if input_thresh_fcst not in valid_threshes_fcst %} + {%- set error_msg = '\n' ~ +'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~ +'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~ +'of valid forecast thresholds (valid_threshes_fcst):\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' level_fcst = ' ~ level_fcst ~ '\n' ~ +' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~ +' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} + {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} + + {%- endif %} +{#- +If threshes_fcst has been reset to something other than its default +value of an empty list, then set the forecast thresholds in the METplus +configuration file because that implies threshes_fcst was set above to +a non-empty value. Then reset threshes_fcst to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_fcst != []) %} +FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}} + {%- endif %} + {%- set threshes_fcst = [] %} + + {%- endif %} + +{#- +Set forecast field options. +#} + {%- set opts_indent_len = 20 %} + {%- if (ns.var_count > 9) and (ns.var_count <= 99) %} + {%- set opts_indent_len = opts_indent_len + 1 %} + {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %} + {%- set opts_indent_len = opts_indent_len + 2 %} + {%- elif (ns.var_count > 999) %} + {%- set opts_indent_len = opts_indent_len + 3 %} + {%- endif %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'APCP' %} + + {%- if field_fcst == 'APCP' %} +FCST_VAR{{ns.var_count}}_OPTIONS = ens_ssvar_bin_size = 50.0; +{{opts_indent}}ens_phist_bin_size = 0.05; + {%- endif %} + + {%- elif input_field_group == 'ASNOW' %} + + {%- if field_fcst == 'ASNOW' %} +FCST_VAR{{ns.var_count}}_OPTIONS = ens_ssvar_bin_size = 50.0; +{{opts_indent}}ens_phist_bin_size = 0.05; + {%- endif %} + + {%- elif input_field_group == 'REFC' %} + + {%- if field_fcst == 'REFC' %} +FCST_VAR{{ns.var_count}}_OPTIONS = ens_ssvar_bin_size = 50.0; +{{opts_indent}}ens_phist_bin_size = 0.05; + {%- endif %} + + {%- elif input_field_group == 'RETOP' %} + + {%- if field_fcst == 'RETOP' %} +FCST_VAR{{ns.var_count}}_OPTIONS = convert(x) = x * 3.28084 * 0.001; ;; Convert from meters to kilofeet. 
+{{opts_indent}}ens_ssvar_bin_size = 50.0; +{{opts_indent}}ens_phist_bin_size = 0.05; + {%- endif %} + + {%- elif input_field_group == 'ADPSFC' %} + + {%- if field_fcst == 'HGT' %} +FCST_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215; +{{opts_indent}}desc = "CEILING"; + {%- elif field_fcst == 'TCDC' %} +FCST_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 200; +{{opts_indent}}GRIB2_ipdtmpl_index=[27]; +{{opts_indent}}GRIB2_ipdtmpl_val=[255]; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif field_fcst == 'VIS' %} +FCST_VAR{{ns.var_count}}_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif field_fcst == 'WIND' %} +FCST_VAR{{ns.var_count}}_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. + {%- endif %} + + {%- elif input_field_group == 'ADPUPA' %} + + {%- if field_fcst == 'CAPE' %} +FCST_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; + {%- endif %} + + {%- endif %} + +{#- +Set observation field name. Note that this has to exactly match the name +of the field in the input observation file. + +For accumulated fields, the input observation file is generated by MET's +PcpCombine tool. In that file, the field name consists of the observation +field name here (field_obs) with the accumulation period appended to it +(separated by an underscore), so we must do the same here to get an exact +match. + +Note: +Turns out for ASNOW, PcpCombine is not run for obs, so we exclude that +from the "if" clause here (so it goes into the "else"). For workflow +behavior uniformity between APCP and ASNOW, consider running PcpCombine +for ASNOW observations as well (just as it's run for APCP observations). + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +#} + {%- if (input_field_group in ['APCP']) %} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}_{{accum_hh}} + {%- else %} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}} + {%- endif %} + +{#- +Set observation field level. +#} + {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %} + {%- set level_obs = valid_levels_obs[indx_level_fcst] %} +OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}} + +{#- +Set observation field threshold(s). Note that no observation thresholds +are included in the METplus configuration file if input_thresh_fcst is +set to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +Set the list of valid observation thresholds to the one corresponding to +the current observation level (level_obs). +#} + {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %} +{#- +If input_thresh_fcst is set to 'all', set the list of observation thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_obs = valid_threshes_obs %} +{#- +If input_thresh_fcst is set to a specific forecast threshold, then the +observation threshold is given by the element in the list of valid +observation thresholds that has the same index as that of input_thresh_fcst +in the list of valid forecast thresholds. +#} + {%- else %} + {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %} + {%- endif %} +{#- +If threshes_obs has been reset to something other than its default value +of an empty list, then set the observation thresholds in the METplus +configuration file because that implies threshes_obs was set above to +a non-empty value. Then reset threshes_obs to its default value for +proper processing of thresholds for the next field. 
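+(When a specific forecast threshold is requested, the matching observation
+threshold is selected by list position rather than by value; corresponding
+forecast and observation thresholds share an index in their respective
+lists but need not be textually identical.)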
+#} + {%- if (threshes_obs != []) %} +OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}} + {%- endif %} + {%- set threshes_obs = [] %} + + {%- endif %} + +{#- +Set observation field options. +#} + {%- set opts_indent_len = opts_indent_len - 1 %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'APCP' %} + + {%- if field_obs == 'APCP' %} +OBS_VAR{{ns.var_count}}_OPTIONS = {FCST_VAR{{ns.var_count}}_OPTIONS} + {%- endif %} + + {%- elif input_field_group == 'ASNOW' %} + + {%- if field_obs == 'ASNOW' %} +OBS_VAR{{ns.var_count}}_OPTIONS = {FCST_VAR{{ns.var_count}}_OPTIONS}; +{{opts_indent}}convert(x) = 100.0*x; + {%- endif %} + + {%- elif input_field_group == 'REFC' %} + + {%- if field_obs == 'MergedReflectivityQCComposite' %} +OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20; +{{opts_indent}}censor_val = -20.0; +{{opts_indent}}cnt_thresh = [ >15 ]; +{{opts_indent}}cnt_logic = UNION; +{{opts_indent}}ens_ssvar_bin_size = 50.0; +{{opts_indent}}ens_phist_bin_size = 0.05; + {%- endif %} + + {%- elif input_field_group == 'RETOP' %} + + {%- if field_obs == 'EchoTop18' %} +OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20; +{{opts_indent}}censor_val = -20.0; +{{opts_indent}}cnt_thresh = [ >15 ]; +{{opts_indent}}cnt_logic = UNION; +{{opts_indent}}convert(x) = x * 3280.84 * 0.001; ;; Convert from kilometers to kilofeet. +{{opts_indent}}ens_ssvar_bin_size = 50.0; +{{opts_indent}}ens_phist_bin_size = 0.05; + {%- endif %} + + {%- elif input_field_group == 'ADPSFC' %} + + {%- if field_obs in ['DPT', 'TMP', 'WIND'] %} +OBS_VAR{{ns.var_count}}_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } + {%- elif field_obs == 'CEILING' %} +OBS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215 + {%- endif %} + + {%- elif input_field_group == 'ADPUPA' %} + + {%- if field_obs in ['DPT', 'HGT', 'TMP', 'WIND'] %} +OBS_VAR{{ns.var_count}}_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } + {%- elif field_obs == 'CAPE' %} +OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; +{{opts_indent}}cnt_logic = UNION; + {%- elif field_obs == 'PBL' %} +OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE"; + {%- endif %} + + {%- endif %} +{#- +Print out a newline to separate the settings for the current field (both +forecast and observation settings) from those for the next field. +#} + {{- '\n' }} + + {%- endif %} + + {%- endfor %} +{%- endfor %} +[dir] +# +# OUTPUT_BASE apparently has to be set to something; it cannot be left +# to its default value. But it is not explicitly used elsewhere in this +# configuration file. +# +OUTPUT_BASE = {{output_base}} +# +# Point observation input directory for {{MetplusToolName}}. +# +{%- if input_field_group in ['ADPSFC', 'ADPUPA'] %} +OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_DIR = {{obs_input_dir}} +{%- else %} +OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_DIR = +{%- endif %} +# +# Grid observation input directory for {{MetplusToolName}}. +# +{%- if input_field_group in ['ADPSFC', 'ADPUPA'] %} +OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_DIR = +{%- else %} +OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_DIR = {{obs_input_dir}} +{%- endif %} +# +# Forecast model input directory for {{MetplusToolName}}. +# +FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}} +# +# Directory containing climatology mean input to {{MetplusToolName}}. 
Not used
+# in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Output directory for {{MetplusToolName}}.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for point observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_DIR.
+#
+{%- if input_field_group in ['ADPSFC', 'ADPUPA'] %}
+OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_TEMPLATE = {{obs_input_fn_template}}
+{%- else %}
+OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_TEMPLATE =
+{%- endif %}
+#
+# Template for gridded observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_DIR.
+#
+{%- if input_field_group in ['ADPSFC', 'ADPUPA'] %}
+OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_TEMPLATE =
+{%- else %}
+OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}}
+{%- endif %}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+# Note that this can be a comma-separated list of ensemble members or a
+# single line; filename wildcard characters (? or *) may be used.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
+#
+# Variable used to specify one or more verification mask files for
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/EnsembleStat_ADPSFC.conf b/parm/metplus/EnsembleStat_ADPSFC.conf
deleted file mode 100644
index 07238030c1..0000000000
--- a/parm/metplus/EnsembleStat_ADPSFC.conf
+++ /dev/null
@@ -1,307 +0,0 @@
-# EnsembleStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = EnsembleStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_ENSEMBLE_STAT_WINDOW_END = {OBS_WINDOW_END} - -# number of expected members for ensemble. Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -ENSEMBLE_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#ENSEMBLE_STAT_OBS_QUALITY_EXC = - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. 
-ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = NONE -#ENSEMBLE_STAT_REGRID_METHOD = BILIN -#ENSEMBLE_STAT_REGRID_WIDTH = 2 -#ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -#ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -ENSEMBLE_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} -ENSEMBLE_STAT_DUPLICATE_FLAG = NONE -ENSEMBLE_STAT_SKIP_CONST = FALSE -ENSEMBLE_STAT_OBS_ERROR_FLAG = FALSE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -#ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -#ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. -# -FCST_VAR1_NAME = TMP -FCST_VAR1_LEVELS = Z2 -FCST_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = Z2 -OBS_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303 -OBS_VAR1_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR2_NAME = DPT -FCST_VAR2_LEVELS = Z2 -FCST_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298 -OBS_VAR2_NAME = DPT -OBS_VAR2_LEVELS = Z2 -OBS_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298 -OBS_VAR2_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR3_NAME = WIND -FCST_VAR3_LEVELS = Z10 -FCST_VAR3_THRESH = ge5, ge10, ge15 -FCST_VAR3_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. 
-OBS_VAR3_NAME = WIND -OBS_VAR3_LEVELS = Z10 -OBS_VAR3_THRESH = ge5, ge10, ge15 -OBS_VAR3_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR4_NAME = TCDC -FCST_VAR4_LEVELS = L0 -FCST_VAR4_THRESH = lt25, gt75 -FCST_VAR4_OPTIONS = GRIB_lvl_typ = 200; - GRIB2_ipdtmpl_index=[27]; - GRIB2_ipdtmpl_val=[255]; - interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR4_NAME = TCDC -OBS_VAR4_LEVELS = L0 -OBS_VAR4_THRESH = lt25, gt75 - -FCST_VAR5_NAME = VIS -FCST_VAR5_LEVELS = L0 -FCST_VAR5_THRESH = lt1609, lt8045, ge8045 -FCST_VAR5_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR5_NAME = VIS -OBS_VAR5_LEVELS = L0 -OBS_VAR5_THRESH = lt1609, lt8045, ge8045 - -FCST_VAR6_NAME = HGT -FCST_VAR6_LEVELS = L0 -FCST_VAR6_THRESH = lt152, lt1520, ge914 -FCST_VAR6_OPTIONS = GRIB_lvl_typ = 215; - desc = "CEILING"; -OBS_VAR6_NAME = CEILING -OBS_VAR6_LEVELS = L0 -OBS_VAR6_THRESH = lt152, lt305, ge914 -OBS_VAR6_OPTIONS = GRIB_lvl_typ = 215 - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = {{obs_input_dir}} -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. -# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. 
-# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/EnsembleStat_ADPUPA.conf b/parm/metplus/EnsembleStat_ADPUPA.conf deleted file mode 100644 index edfda41b89..0000000000 --- a/parm/metplus/EnsembleStat_ADPUPA.conf +++ /dev/null @@ -1,351 +0,0 @@ -# EnsembleStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = EnsembleStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_ENSEMBLE_STAT_WINDOW_END = {OBS_WINDOW_END} - -# number of expected members for ensemble. 
Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -ENSEMBLE_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#ENSEMBLE_STAT_OBS_QUALITY_EXC = - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. -ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = NONE -#ENSEMBLE_STAT_REGRID_METHOD = BILIN -#ENSEMBLE_STAT_REGRID_WIDTH = 2 -#ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -#ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -ENSEMBLE_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} -ENSEMBLE_STAT_DUPLICATE_FLAG = NONE -ENSEMBLE_STAT_SKIP_CONST = FALSE -ENSEMBLE_STAT_OBS_ERROR_FLAG = FALSE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -#ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -#ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. 
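Since every field block follows the indexed FCST_VARn_*/OBS_VARn_* convention just described, the blocks can be generated mechanically, which is what the Jinja-templated configurations added later in this patch do. A small illustrative generator follows; the field names and levels in it are examples, not a complete vx configuration.

```python
# Illustrative generator for the indexed FCST_VARn_*/OBS_VARn_* convention.
fields = [
    {"name": "TMP",  "levels": "P850", "thresh": "ge288, ge293, ge298"},
    {"name": "WIND", "levels": "P700", "thresh": "ge10, ge15, ge20"},
]

def render_var_blocks(fields):
    lines = []
    for n, fld in enumerate(fields, start=1):  # METplus indices are 1-based
        for side in ("FCST", "OBS"):
            lines.append(f"{side}_VAR{n}_NAME = {fld['name']}")
            lines.append(f"{side}_VAR{n}_LEVELS = {fld['levels']}")
            lines.append(f"{side}_VAR{n}_THRESH = {fld['thresh']}")
        lines.append("")  # blank line between field blocks
    return "\n".join(lines)

print(render_var_blocks(fields))
```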
-# -FCST_VAR1_NAME = TMP -FCST_VAR1_LEVELS = P850 -FCST_VAR1_THRESH = ge288, ge293, ge298 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = P850 -OBS_VAR1_THRESH = ge288, ge293, ge298 -OBS_VAR1_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR2_NAME = TMP -FCST_VAR2_LEVELS = P700 -FCST_VAR2_THRESH = ge273, ge278, ge283 -OBS_VAR2_NAME = TMP -OBS_VAR2_LEVELS = P700 -OBS_VAR2_THRESH = ge273, ge278, ge283 -OBS_VAR2_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR3_NAME = TMP -FCST_VAR3_LEVELS = P500 -FCST_VAR3_THRESH = ge258, ge263, ge268 -OBS_VAR3_NAME = TMP -OBS_VAR3_LEVELS = P500 -OBS_VAR3_THRESH = ge258, ge263, ge268 -OBS_VAR3_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR4_NAME = DPT -FCST_VAR4_LEVELS = P850 -FCST_VAR4_THRESH = ge273, ge278, ge283 -OBS_VAR4_NAME = DPT -OBS_VAR4_LEVELS = P850 -OBS_VAR4_THRESH = ge273, ge278, ge283 -OBS_VAR4_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR5_NAME = DPT -FCST_VAR5_LEVELS = P700 -FCST_VAR5_THRESH = ge263, ge286, ge273 -OBS_VAR5_NAME = DPT -OBS_VAR5_LEVELS = P700 -OBS_VAR5_THRESH = ge263, ge286, ge273 -OBS_VAR5_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR6_NAME = WIND -FCST_VAR6_LEVELS = P850 -FCST_VAR6_THRESH = ge5, ge10, ge15 -OBS_VAR6_NAME = WIND -OBS_VAR6_LEVELS = P850 -OBS_VAR6_THRESH = ge5, ge10, ge15 -OBS_VAR6_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR7_NAME = WIND -FCST_VAR7_LEVELS = P700 -FCST_VAR7_THRESH = ge10, ge15, ge20 -OBS_VAR7_NAME = WIND -OBS_VAR7_LEVELS = P700 -OBS_VAR7_THRESH = ge10, ge15, ge20 -OBS_VAR7_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR8_NAME = WIND -FCST_VAR8_LEVELS = P500 -FCST_VAR8_THRESH = ge15, ge21, ge26 -OBS_VAR8_NAME = WIND -OBS_VAR8_LEVELS = P500 -OBS_VAR8_THRESH = ge15, ge21, ge26 -OBS_VAR8_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR9_NAME = WIND -FCST_VAR9_LEVELS = P250 -FCST_VAR9_THRESH = ge26, ge31, ge36, ge46, ge62 -OBS_VAR9_NAME = WIND -OBS_VAR9_LEVELS = P250 -OBS_VAR9_THRESH = ge26, ge31, ge36, ge46, ge62 -OBS_VAR9_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR10_NAME = HGT -FCST_VAR10_LEVELS = P500 -FCST_VAR10_THRESH = ge5400, ge5600, ge5880 -OBS_VAR10_NAME = HGT -OBS_VAR10_LEVELS = P500 -OBS_VAR10_THRESH = ge5400, ge5600, ge5880 -OBS_VAR10_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR11_NAME = CAPE -FCST_VAR11_LEVELS = L0 -FCST_VAR11_THRESH = le1000, gt1000&<2500, ge2500&<4000, ge2500 -FCST_VAR11_OPTIONS = cnt_thresh = [ >0 ]; -OBS_VAR11_NAME = CAPE -OBS_VAR11_LEVELS = L0-100000 -OBS_VAR11_THRESH = le1000, gt1000&<2500, ge2500&<4000, ge2500 -OBS_VAR11_OPTIONS = cnt_thresh 
= [ >0 ]; - cnt_logic = UNION; - -FCST_VAR12_NAME = HPBL -FCST_VAR12_LEVELS = Z0 -FCST_VAR12_THRESH = lt500, lt1500, gt1500 -OBS_VAR12_NAME = PBL -OBS_VAR12_LEVELS = L0 -OBS_VAR12_THRESH = lt500, lt1500, gt1500 -OBS_VAR12_OPTIONS = desc = "TKE"; - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = {{obs_input_dir}} -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. -# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. -# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/EnsembleStat_APCP.conf b/parm/metplus/EnsembleStat_APCP.conf deleted file mode 100644 index 7604a90bd7..0000000000 --- a/parm/metplus/EnsembleStat_APCP.conf +++ /dev/null @@ -1,258 +0,0 @@ -# EnsembleStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = EnsembleStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. 
-# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_FILE_WINDOW_BEGIN = 0 -OBS_FILE_WINDOW_END = 0 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = 0 -OBS_ENSEMBLE_STAT_WINDOW_END = 0 - -# number of expected members for ensemble. Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. 
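The ens_thresh gate described above is simple to state precisely: the tool runs only if the ratio of valid member files to expected members meets the threshold. Below is a hedged sketch of that check; MET performs it internally in its own code, and the function name here is ours.

```python
# Sketch of the ens_thresh gate: run only if valid/expected >= ens_thresh.
def enough_members(n_valid, n_expected, ens_thresh=0.05):
    if n_expected <= 0:
        raise ValueError("expected member count must be positive")
    return (n_valid / n_expected) >= ens_thresh

print(enough_members(1, 10))  # True: 0.10 >= 0.05, the app would run
print(enough_members(0, 10))  # False: no valid member files at all
```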
-ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = FCST -ENSEMBLE_STAT_REGRID_METHOD = BUDGET -ENSEMBLE_STAT_REGRID_WIDTH = 2 -ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -ENSEMBLE_STAT_MESSAGE_TYPE = -ENSEMBLE_STAT_DUPLICATE_FLAG = UNIQUE -ENSEMBLE_STAT_SKIP_CONST = TRUE -ENSEMBLE_STAT_OBS_ERROR_FLAG = TRUE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_OPTIONS = ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; -OBS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_OPTIONS = {FCST_VAR1_OPTIONS} - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = {{obs_input_dir}} -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. 
Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. -# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. -# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/EnsembleStat_ASNOW.conf b/parm/metplus/EnsembleStat_ASNOW.conf deleted file mode 100644 index 8897b03295..0000000000 --- a/parm/metplus/EnsembleStat_ASNOW.conf +++ /dev/null @@ -1,259 +0,0 @@ -# EnsembleStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = EnsembleStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. 
-# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_FILE_WINDOW_BEGIN = 0 -OBS_FILE_WINDOW_END = 0 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = 0 -OBS_ENSEMBLE_STAT_WINDOW_END = 0 - -# number of expected members for ensemble. Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. 
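The time-looping settings above (LOOP_BY = INIT, INIT_TIME_FMT = %Y%m%d%H, LEAD_SEQ) amount to parsing a cycle date and stepping through forecast hours. An illustrative expansion in Python, mirroring what METplus does internally; the helper name is ours, not a METplus API.

```python
# Illustrative expansion of a cycle date plus LEAD_SEQ into valid times,
# using the INIT_TIME_FMT (%Y%m%d%H) configured above.
from datetime import datetime, timedelta

def expand_leads(cdate, fhr_list):
    init = datetime.strptime(cdate, "%Y%m%d%H")
    return [(fhr, init + timedelta(hours=fhr)) for fhr in fhr_list]

for fhr, valid in expand_leads("2024022600", [0, 6, 12]):
    print(f"f{fhr:03d} -> valid {valid:%Y%m%d_%H%M%S}")
```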
-ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = FCST -ENSEMBLE_STAT_REGRID_METHOD = BUDGET -ENSEMBLE_STAT_REGRID_WIDTH = 2 -ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -ENSEMBLE_STAT_MESSAGE_TYPE = -ENSEMBLE_STAT_DUPLICATE_FLAG = UNIQUE -ENSEMBLE_STAT_SKIP_CONST = TRUE -ENSEMBLE_STAT_OBS_ERROR_FLAG = TRUE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_OPTIONS = ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_OPTIONS = {FCST_VAR1_OPTIONS}; - convert(x) = 100.0*x; - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = {{obs_input_dir}} -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. 
Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. -# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. -# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/EnsembleStat_REFC.conf b/parm/metplus/EnsembleStat_REFC.conf deleted file mode 100644 index 6de6eddeb8..0000000000 --- a/parm/metplus/EnsembleStat_REFC.conf +++ /dev/null @@ -1,265 +0,0 @@ -# EnsembleStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = EnsembleStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. 
-# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_FILE_WINDOW_BEGIN = -300 -OBS_FILE_WINDOW_END = 300 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = 0 -OBS_ENSEMBLE_STAT_WINDOW_END = 0 - -# number of expected members for ensemble. Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. -ENSEMBLE_STAT_MET_OBS_ERR_TABLE = - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = FCST -ENSEMBLE_STAT_REGRID_METHOD = BUDGET -ENSEMBLE_STAT_REGRID_WIDTH = 2 -ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -# Should this parameter be set to something other than ADPSFC (maybe -# just leave empty) since we are not verifying surface fields? 
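This REFC configuration sets OBS_FILE_WINDOW_BEGIN/END to -300/+300 seconds, i.e. an observation file is usable only if its timestamp falls within five minutes of the forecast valid time. Before the MESSAGE_TYPE setting that the question above concerns, a minimal sketch of that windowing test, illustrative only:

```python
# Minimal sketch of the +/-300 s observation file window used above.
from datetime import datetime, timedelta

def in_window(obs_time, valid, begin_s=-300, end_s=300):
    """True if obs_time lies in [valid + begin_s, valid + end_s]."""
    lo = valid + timedelta(seconds=begin_s)
    hi = valid + timedelta(seconds=end_s)
    return lo <= obs_time <= hi

valid = datetime(2024, 2, 26, 6, 0, 0)
print(in_window(valid - timedelta(seconds=240), valid))  # True: 4 min early
print(in_window(valid + timedelta(seconds=600), valid))  # False: 10 min late
```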
-ENSEMBLE_STAT_MESSAGE_TYPE = ADPSFC -ENSEMBLE_STAT_DUPLICATE_FLAG = NONE -ENSEMBLE_STAT_SKIP_CONST = TRUE -ENSEMBLE_STAT_OBS_ERROR_FLAG = FALSE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = FULL - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. -# -FCST_VAR1_NAME = {{fieldname_in_fcst_input}} -FCST_VAR1_LEVELS = L0 -FCST_VAR1_OPTIONS = ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = {{obs_input_dir}} -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. -# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. 
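The censor_thresh/censor_val pair in the REFC obs options above (censor_thresh = lt-20; censor_val = -20.0) replaces values below the threshold with the censor value before statistics are computed. MET applies this internally; the following only demonstrates the semantics.

```python
# Demonstration of censor semantics only; MET applies this internally.
def censor(values, thresh=-20.0, censor_val=-20.0):
    """Replace any value below `thresh` with `censor_val` (cf. lt-20 above)."""
    return [censor_val if v < thresh else v for v in values]

# Reflectivities in dBZ: the -35.0 is censored up to -20.0, the rest pass.
print(censor([-35.0, -20.0, 5.5, 42.0]))  # [-20.0, -20.0, 5.5, 42.0]
```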
-[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. -# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/EnsembleStat_RETOP.conf b/parm/metplus/EnsembleStat_RETOP.conf deleted file mode 100644 index abd2dd2a45..0000000000 --- a/parm/metplus/EnsembleStat_RETOP.conf +++ /dev/null @@ -1,267 +0,0 @@ -# EnsembleStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = EnsembleStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. 
-# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_FILE_WINDOW_BEGIN = -300 -OBS_FILE_WINDOW_END = 300 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = 0 -OBS_ENSEMBLE_STAT_WINDOW_END = 0 - -# number of expected members for ensemble. Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. -ENSEMBLE_STAT_MET_OBS_ERR_TABLE = - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = FCST -ENSEMBLE_STAT_REGRID_METHOD = BUDGET -ENSEMBLE_STAT_REGRID_WIDTH = 2 -ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -# Should this parameter be set to something other than ADPSFC (maybe -# just leave empty) since we are not verifying surface fields? 
-ENSEMBLE_STAT_MESSAGE_TYPE = ADPSFC -ENSEMBLE_STAT_DUPLICATE_FLAG = NONE -ENSEMBLE_STAT_SKIP_CONST = TRUE -ENSEMBLE_STAT_OBS_ERROR_FLAG = FALSE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = FULL - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. -# -FCST_VAR1_NAME = {{fieldname_in_fcst_input}} -FCST_VAR1_LEVELS = L0 -FCST_VAR1_OPTIONS = convert(x) = x * 3.28084 * 0.001; ;; Convert from meters to kilofeet. - ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; ;; Convert from kilometers to kilofeet. - ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = {{obs_input_dir}} -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. 
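The ENSEMBLE_STAT_OUTPUT_DIR setting follows below; first, a quick check of the two convert(x) expressions above, which put forecast echo tops (meters) and observed echo tops (kilometers) on a common kilofeet scale. Purely illustrative arithmetic:

```python
# Checking the convert(x) factors above; illustrative arithmetic only.
def m_to_kft(x_m):
    return x_m * 3.28084 * 0.001   # meters -> feet -> kilofeet

def km_to_kft(x_km):
    return x_km * 3280.84 * 0.001  # kilometers -> feet -> kilofeet

# A 9 km echo top expressed both ways lands on the same ~29.5 kft value,
# so forecast (m) and observed (km) RETOP end up on a common scale.
print(m_to_kft(9000.0), km_to_kft(9.0))
```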
-# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. -# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GenEnsProd.conf b/parm/metplus/GenEnsProd.conf new file mode 100644 index 0000000000..7291ce02fa --- /dev/null +++ b/parm/metplus/GenEnsProd.conf @@ -0,0 +1,390 @@ +# {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{fhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Specify the name of the METplus log file. 
+#
+LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
+#
+# Specify the location and name of the final METplus conf file.
+#
+METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}}
+#
+# Name to identify model (forecast) data in output.
+#
+MODEL = {{vx_fcst_model_name}}
+
+{{METPLUS_TOOL_NAME}}_DESC = NA
+#
+# Name to identify observation data in output.
+#
+OBTYPE = {{obtype}}
+#
+# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
+#
+LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
+
+###
+# File I/O
+###
+
+#
+# Forecast model input directory for {{MetplusToolName}}.
+#
+{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+# Note that this can be a comma-separated list of ensemble members
+# or a single string; filename wildcard characters (? or *) may be used.
+#
+{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+
+# {{METPLUS_TOOL_NAME}}_CTRL_INPUT_DIR = {INPUT_BASE}
+# {{METPLUS_TOOL_NAME}}_CTRL_INPUT_TEMPLATE =
+# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
+
+#
+# Output directory for {{MetplusToolName}}.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE = {{metplus_tool_name}}_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+#
+# There are n ensemble members, but one is used as the control, so specify
+# n-1 members.
+#
+{{METPLUS_TOOL_NAME}}_N_MEMBERS = {{num_ens_members}}
+
+###
+# Field Info
+###
+#
+# Ensemble variables and levels as specified in the ens field dictionary
+# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS,
+# (optional) ENS_VARn_OPTION
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja
+scoping rules). Define such variables.
+#}
+{%- set threshes_fcst = [] %}
+{%- set indx_input_thresh_fcst = '' %}
+
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+{%- set tmp = '' %}
+{%- set error_msg = '' %}
+
+{#-
+Extract the list of forecast dictionaries containing the valid fields,
+levels, and thresholds corresponding to the specified field group
+(input_field_group).
+#}
+{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
+
+{#-
+Reset the specified forecast level so that if it happens to be an
+accumulation (e.g. 'A03'), the leading zeros in front of the hour are
+stripped out (e.g. reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Ensure that the specified input forecast level(s) (input_level_fcst) and
+threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
+set(s) of valid forecast levels and thresholds, respectively, specified
+in fields_levels_threshes_fcst.
+#} +{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }} +{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }} + +{#- +For convenience, create lists of valid forecast field names. +#} +{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %} +{%- set valid_fields_fcst = [] %} +{%- for i in range(0,num_valid_fields_fcst) %} + {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %} + {%- set tmp = valid_fields_fcst.append(field) %} +{%- endfor %} + +{#- +Loop over the valid fields and set field names, levels, thresholds, and/ +or options for each forecast field. Note that GenEnsProd only deals with +forecasts; it does not need observations. +#} +{%- set ns = namespace(var_count = 0) %} +{%- for i in range(0,num_valid_fields_fcst) %} + + {%- set field_fcst = valid_fields_fcst[i] %} + +{#- +Extract dictionary of valid forecast levels (the dictionary keys) and +corresponding lists of valid thresholds (the values) for each level. +Then loop over these levels and corresponding lists of thresholds to set +the forecast field names, levels, thresholds, and/or options. +#} + {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %} + {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %} + + {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} +{#- +Increment the METplus variable counter. +#} + {%- set ns.var_count = ns.var_count+1 %} + +{#- +Set forecast field name. Note that this has to exactly match the name +of the field in the input forecast file. + +For accumulated fields, the input forecast file is generated by MET's +PcpCombine tool. In that file, the field name consists of the forecast +field name here (field_fcst) with the accumulation period appended to +it (separated by an underscore), so we must do the same here to get an +exact match. +#} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +ENS_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}} + {%- else %} +ENS_VAR{{ns.var_count}}_NAME = {{field_fcst}} + {%- endif %} + +{#- +Set forecast field level. +#} +ENS_VAR{{ns.var_count}}_LEVELS = {{level_fcst}} + +{#- +Set forecast field threshold(s). Note that no forecast thresholds are +included in the METplus configuration file if input_thresh_fcst is set +to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_thresh_fcst is set to 'all', set the list of forecast thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_fcst = valid_threshes_fcst %} +{#- +If input_thresh_fcst is set to a specific value: + 1) Ensure that input_thresh_fcst exists in the list of valid forecast + thresholds. + 2) Get the index of input_thresh_fcst in the list of valid forecast + thresholds. This will be needed later below when setting the + observation threshold(s). + 3) Use this index to set the forecast threshold to a one-element list + containing the specified forecast threshold. 
+#} + {%- else %} + + {%- if input_thresh_fcst not in valid_threshes_fcst %} + {%- set error_msg = '\n' ~ +'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~ +'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~ +'of valid forecast thresholds (valid_threshes_fcst):\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' level_fcst = ' ~ level_fcst ~ '\n' ~ +' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~ +' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} + {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} + + {%- endif %} +{#- +If threshes_fcst has been reset to something other than its default +value of an empty list, then set the forecast thresholds in the METplus +configuration file because that implies threshes_fcst was set above to +a non-empty value. Then reset threshes_fcst to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_fcst != []) %} +ENS_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}} + {%- endif %} + {%- set threshes_fcst = [] %} + + {%- endif %} + +{#- +Set forecast field options. +#} + {%- set opts_indent_len = 19 %} + {%- if (ns.var_count > 9) and (ns.var_count <= 99) %} + {%- set opts_indent_len = opts_indent_len + 1 %} + {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %} + {%- set opts_indent_len = opts_indent_len + 2 %} + {%- elif (ns.var_count > 999) %} + {%- set opts_indent_len = opts_indent_len + 3 %} + {%- endif %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'RETOP' %} + + {%- if field_fcst == 'RETOP' %} +ENS_VAR{{ns.var_count}}_OPTIONS = convert(x) = x * 3.28084 * 0.001; ;; Convert from meters to kilofeet. + {%- endif %} + + {%- elif input_field_group == 'ADPSFC' %} + + {%- if field_fcst == 'HGT' %} +ENS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215; +{{opts_indent}}desc = "CEILING"; + {%- elif field_fcst == 'TCDC' %} +ENS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 200; +{{opts_indent}}GRIB2_ipdtmpl_index=[27]; +{{opts_indent}}GRIB2_ipdtmpl_val=[255]; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif field_fcst == 'VIS' %} +ENS_VAR{{ns.var_count}}_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif field_fcst == 'WIND' %} +ENS_VAR{{ns.var_count}}_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. + {%- endif %} + + {%- elif input_field_group == 'ADPUPA' %} + + {%- if field_fcst == 'CAPE' %} +ENS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; + {%- endif %} + + {%- endif %} +{#- +Print out a newline to separate the settings for the current field (both +forecast and observation settings) from those for the next field. 
+#} + {{- '\n' }} + + {%- endif %} + + {%- endfor %} +{%- endfor %} +### +# {{MetplusToolName}} +### + +# {{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE +# {{METPLUS_TOOL_NAME}}_REGRID_METHOD = NEAREST +# {{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 1 +# {{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5 +# {{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE + +# {{METPLUS_TOOL_NAME}}_CENSOR_THRESH = +# {{METPLUS_TOOL_NAME}}_CENSOR_VAL = +# {{METPLUS_TOOL_NAME}}_CAT_THRESH = +# {{METPLUS_TOOL_NAME}}_NC_VAR_STR = + +# Threshold for ratio of valid files to expected files to allow app to run +{{METPLUS_TOOL_NAME}}_ENS_THRESH = 0.05 + +{{METPLUS_TOOL_NAME}}_NBRHD_PROB_WIDTH = 27 +{{METPLUS_TOOL_NAME}}_NBRHD_PROB_SHAPE = CIRCLE +{{METPLUS_TOOL_NAME}}_NBRHD_PROB_VLD_THRESH = 0.0 + +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_VLD_THRESH = 0.0 +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_SHAPE = CIRCLE +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_METHOD = GAUSSIAN +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_WIDTH = 1 + +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FILE_NAME = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FIELD = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_METHOD = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_WIDTH = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_VLD_THRESH = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_SHAPE = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_MATCH_MONTH = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_DAY_INTERVAL = 31 +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_HOUR_INTERVAL = 6 + +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FILE_NAME = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FIELD = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_METHOD = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_WIDTH = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_VLD_THRESH = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_SHAPE = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_MATCH_MONTH = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_DAY_INTERVAL = 31 +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_HOUR_INTERVAL = 6 + +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_LATLON = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_MEAN = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_STDEV = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_MINUS = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_PLUS = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_MIN = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_MAX = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_RANGE = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_VLD_COUNT = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_FREQUENCY = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_NEP = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_NMEP = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_CLIMO = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_CLIMO_CDF = FALSE + +# {{METPLUS_TOOL_NAME}}_ENS_MEMBER_IDS = +# {{METPLUS_TOOL_NAME}}_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_ADPSFC.conf b/parm/metplus/GenEnsProd_ADPSFC.conf deleted file mode 100644 index cb253f575b..0000000000 --- a/parm/metplus/GenEnsProd_ADPSFC.conf +++ /dev/null @@ -1,219 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. 
-PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. -# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. 
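For reference, the bookkeeping implied by LOOP_BY = INIT, INIT_TIME_FMT, and LEAD_SEQ in these configurations can be mimicked with the standard datetime module. The cycle date and lead below are invented examples, not values from this patch.

# Init time plus forecast lead gives the valid time that appears in the
# output template ({lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V).
from datetime import datetime, timedelta

init = datetime.strptime('2019061500', '%Y%m%d%H')   # hypothetical INIT_BEG
lead_hrs = 6                                         # one entry of LEAD_SEQ
valid = init + timedelta(hours=lead_hrs)
print(valid.strftime('%Y%m%d_%H%M%S'))   # 20190615_060000
print(f'{lead_hrs:02d}0000L')            # 060000L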
Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = TMP -ENS_VAR1_LEVELS = Z02 -ENS_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303 - -ENS_VAR2_NAME = DPT -ENS_VAR2_LEVELS = Z2 -ENS_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298 - -ENS_VAR3_NAME = WIND -ENS_VAR3_LEVELS = Z10 -ENS_VAR3_THRESH = ge5, ge10, ge15 -ENS_VAR3_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. - -ENS_VAR4_NAME = TCDC -ENS_VAR4_LEVELS = L0 -ENS_VAR4_THRESH = lt25, gt75 -ENS_VAR4_OPTIONS = GRIB_lvl_typ = 200; - GRIB2_ipdtmpl_index=[27]; - GRIB2_ipdtmpl_val=[255]; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -ENS_VAR5_NAME = VIS -ENS_VAR5_LEVELS = L0 -ENS_VAR5_THRESH = lt1609, lt8045, ge8045 -ENS_VAR5_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } - -ENS_VAR6_NAME = HGT -ENS_VAR6_LEVELS = L0 -ENS_VAR6_THRESH = lt152, lt1520, ge914 -ENS_VAR6_OPTIONS = GRIB_lvl_typ = 215; - desc = "CEILING"; - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_ADPUPA.conf b/parm/metplus/GenEnsProd_ADPUPA.conf deleted file mode 100644 index 863427752f..0000000000 --- 
a/parm/metplus/GenEnsProd_ADPUPA.conf +++ /dev/null @@ -1,236 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. -# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. 
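All of the {{...}} placeholders in these .conf files are Jinja2 fields that the workflow substitutes before METplus reads the file. Below is a minimal sketch of that substitution using the jinja2 package; the context values are invented, and the App's actual templating layer may differ in detail.

# Render a two-line fragment the way the workflow fills in these templates.
import jinja2

fragment = 'INIT_BEG = {{cdate}}\nLEAD_SEQ = {{fhr_list}}'
print(jinja2.Template(fragment).render(cdate='2019061500', fhr_list='3,6,9'))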
-# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = TMP -ENS_VAR1_LEVELS = P850 -ENS_VAR1_THRESH = ge288, ge293, ge298 - -ENS_VAR2_NAME = TMP -ENS_VAR2_LEVELS = P700 -ENS_VAR2_THRESH = ge273, ge278, ge283 - -ENS_VAR3_NAME = TMP -ENS_VAR3_LEVELS = P500 -ENS_VAR3_THRESH = ge258, ge263, ge268 - -ENS_VAR4_NAME = DPT -ENS_VAR4_LEVELS = P850 -ENS_VAR4_THRESH = ge273, ge278, ge283 - -ENS_VAR5_NAME = DPT -ENS_VAR5_LEVELS = P700 -ENS_VAR5_THRESH = ge263, ge268, ge273 - -ENS_VAR6_NAME = WIND -ENS_VAR6_LEVELS = P850 -ENS_VAR6_THRESH = ge5, ge10, ge15 - -ENS_VAR7_NAME = WIND -ENS_VAR7_LEVELS = P700 -ENS_VAR7_THRESH = ge10, ge15, ge20 - -ENS_VAR8_NAME = WIND -ENS_VAR8_LEVELS = P500 -ENS_VAR8_THRESH = ge15, ge21, ge26 - -ENS_VAR9_NAME = WIND -ENS_VAR9_LEVELS = P250 -ENS_VAR9_THRESH = ge26, ge31, ge36, ge46, ge62 - -ENS_VAR10_NAME = HGT -ENS_VAR10_LEVELS = P500 -ENS_VAR10_THRESH = ge5400, ge5600, ge5880 - -ENS_VAR11_NAME = CAPE -ENS_VAR11_LEVELS = L0 -ENS_VAR11_THRESH = le1000, gt1000&<2500, gt2500&<4000, gt2500 -ENS_VAR11_OPTIONS = cnt_thresh = [ >0 ]; - -ENS_VAR12_NAME = HPBL -ENS_VAR12_LEVELS = Z0 -ENS_VAR12_THRESH = lt500, lt1500, gt1500 - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE 
-GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_APCP.conf b/parm/metplus/GenEnsProd_APCP.conf deleted file mode 100644 index 0d05843a87..0000000000 --- a/parm/metplus/GenEnsProd_APCP.conf +++ /dev/null @@ -1,191 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. 
-# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. -# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -ENS_VAR1_LEVELS = A{{accum_hh}} -ENS_VAR1_THRESH = {{field_thresholds}} - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_ASNOW.conf b/parm/metplus/GenEnsProd_ASNOW.conf deleted file mode 100644 index ea9dac02d9..0000000000 --- a/parm/metplus/GenEnsProd_ASNOW.conf +++ /dev/null @@ -1,192 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. 
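As the APCP configuration above notes, accumulated-field names must carry the accumulation period so that they match the names PcpCombine writes. A small illustration with invented values:

# APCP_06 / A06 style naming for accumulated fields (values hypothetical).
fieldname_in_met_output = 'APCP'
accum_hh = '06'
print(f'{fieldname_in_met_output}_{accum_hh}')   # ENS_VAR1_NAME   -> APCP_06
print(f'A{accum_hh}')                            # ENS_VAR1_LEVELS -> A06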
-PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. -# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### - -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. 
Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -ENS_VAR1_LEVELS = A{{accum_hh}} -ENS_VAR1_THRESH = {{field_thresholds}} - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_REFC.conf b/parm/metplus/GenEnsProd_REFC.conf deleted file mode 100644 index 553c23f69e..0000000000 --- a/parm/metplus/GenEnsProd_REFC.conf +++ /dev/null @@ -1,191 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. 
-# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. -# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. 
Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = {{fieldname_in_fcst_input}} -ENS_VAR1_LEVELS = L0 -ENS_VAR1_THRESH = {{field_thresholds}} - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_RETOP.conf b/parm/metplus/GenEnsProd_RETOP.conf deleted file mode 100644 index 49e5e5c3b6..0000000000 --- a/parm/metplus/GenEnsProd_RETOP.conf +++ /dev/null @@ -1,192 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. 
-# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. -# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = {{fieldname_in_fcst_input}} -ENS_VAR1_LEVELS = L0 -ENS_VAR1_THRESH = {{field_thresholds}} -ENS_VAR1_OPTIONS = convert(x) = x * 3.28084 * 0.001; ;; Convert from meters to kilofeet. 
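A quick sanity check of the RETOP unit conversion above (meters to kilofeet), using a made-up echo-top height:

# convert(x) = x * 3.28084 * 0.001 turns meters into thousands of feet.
x_m = 12000.0                   # hypothetical echo top in meters
x_kft = x_m * 3.28084 * 0.001   # same arithmetic as the MET convert(x)
print(round(x_kft, 2))          # 39.37 (i.e. ~39,370 ft)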
- -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GridStat_APCP.conf b/parm/metplus/GridStat_APCP.conf deleted file mode 100644 index 51e5125951..0000000000 --- a/parm/metplus/GridStat_APCP.conf +++ /dev/null @@ -1,309 +0,0 @@ -# GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. 
-INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. Here, -# we store the value of the original lead in this column, i.e. the lead -# with zero corresponding to the actual start time of the forecast (which -# is (cdate - time_lag)), not to cdate. This is just the lead in -# LEAD_SEQ with the time lag (time_lag) of the current forecast member -# added on. -# -# Uncomment this line only after upgrading to METplus 5.x. -#GRID_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}} -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. 
-# -# Note on use of set_attr_lead and ensemble member time-lagging: -# ------------------------------------------------------------- -# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS -# specifies the lead to use both in naming of the output .stat and .nc -# files and for setting the lead values contained in those files. This -# option causes MET/METplus to use the lead values in the variable LEAD_SEQ -# set above, which are the same for all ensemble forecast members (i.e. -# regardless of whether members are time lagged with respect to the -# nominal cycle date specified by cdate). If set_attr_lead were not -# specified as below, then MET/METplus would get the lead from the input -# forecast file, and that would in general differ from one ensemble member -# to the next depending on whether the member is time-lagged. That would -# cause confusion, so here, we always use lead values with zero lead -# corresponding to the nominal cdate. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = {{field_thresholds}} -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds}} - -# -# Forecast data time window(s). -# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. 
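The time-lag bookkeeping described in the set_attr_lead note above can be sketched as follows. The cycle date, lag, and lead are invented examples; the point is that the nominal lead (relative to cdate) differs from the lead stored in a time-lagged member's own files.

# Nominal lead vs. the lead a time-lagged member's file would report.
from datetime import datetime, timedelta

cdate = datetime.strptime('2019061500', '%Y%m%d%H')
time_lag = timedelta(hours=6)        # member initialized 6 h before cdate
nominal_lead = timedelta(hours=12)   # entry from LEAD_SEQ

valid = cdate + nominal_lead
lead_in_file = valid - (cdate - time_lag)   # lead relative to member start
print(nominal_lead, lead_in_file)           # 12:00:00 vs 18:00:00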
-# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. 
-# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ASNOW.conf b/parm/metplus/GridStat_ASNOW.conf deleted file mode 100644 index 3960a10c30..0000000000 --- a/parm/metplus/GridStat_ASNOW.conf +++ /dev/null @@ -1,283 +0,0 @@ -# GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET config file to pass to GridStat. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. 
Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = - -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = {{field_thresholds}} -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = convert(x) = 100.0*x; -# -# Forecast data time window(s). -# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 5 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. 
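For intuition, the neighborhood settings above (width 5, SQUARE shape, coverage threshold >=0.5) amount to asking what fraction of points in each 5x5 window exceeds the field threshold. A toy illustration with a random event mask, not real data:

# Fractional coverage in one 5x5 neighborhood window.
import numpy as np

width = 5
events = np.random.default_rng(0).random((width, width)) > 0.6  # bool mask
coverage = events.mean()           # fraction of points with an event
print(coverage, coverage >= 0.5)   # compared against COV_THRESH >=0.5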
-# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. 
-# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_REFC.conf b/parm/metplus/GridStat_REFC.conf deleted file mode 100644 index c7f34d27f9..0000000000 --- a/parm/metplus/GridStat_REFC.conf +++ /dev/null @@ -1,315 +0,0 @@ -# GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. 
Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. Here, -# we store the value of the original lead in this column, i.e. the lead -# with zero corresponding to the actual start time of the forecast (which -# is (cdate - time_lag)), not to cdate. This is just the lead in -# LEAD_SEQ with the time lag (time_lag) of the current forecast member -# added on. -# -# Uncomment this line only after upgrading to METplus 5.x. -#GRID_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}} -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -# Note on use of set_attr_lead and ensemble member time-lagging: -# ------------------------------------------------------------- -# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS -# specifies the lead to use both in naming of the output .stat and .nc -# files and for setting the lead values contained in those files. This -# option causes MET/METplus to use the lead values in the variable LEAD_SEQ -# set above, which are the same for all ensemble forecast members (i.e. -# regardless of whether members are time lagged with respect to the -# nominal cycle date specified by cdate). If set_attr_lead were not -# specified as below, then MET/METplus would get the lead from the input -# forecast file, and that would in general differ from one ensemble member -# to the next depending on whether the member is time-lagged. That would -# cause confusion, so here, we always use lead values with zero lead -# corresponding to the nominal cdate. -# -FCST_VAR1_NAME = {{fieldname_in_fcst_input}} -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = {{field_thresholds}} -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = censor_thresh = [eq-999, <-20]; - censor_val = [-9999, -20]; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). 
-# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 1,3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. 
-# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_RETOP.conf b/parm/metplus/GridStat_RETOP.conf deleted file mode 100644 index be91a0ba03..0000000000 --- a/parm/metplus/GridStat_RETOP.conf +++ /dev/null @@ -1,317 +0,0 @@ -# GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 
0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. Here, -# we store the value of the original lead in this column, i.e. the lead -# with zero corresponding to the actual start time of the forecast (which -# is (cdate - time_lag)), not to cdate. This is just the lead in -# LEAD_SEQ with the time lag (time_lag) of the current forecast member -# added on. -# -# Uncomment this line only after upgrading to METplus 5.x. -#GRID_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}} -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -# Note on use of set_attr_lead and ensemble member time-lagging: -# ------------------------------------------------------------- -# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS -# specifies the lead to use both in naming of the output .stat and .nc -# files and for setting the lead values contained in those files. This -# option causes MET/METplus to use the lead values in the variable LEAD_SEQ -# set above, which are the same for all ensemble forecast members (i.e. -# regardless of whether members are time lagged with respect to the -# nominal cycle date specified by cdate). If set_attr_lead were not -# specified as below, then MET/METplus would get the lead from the input -# forecast file, and that would in general differ from one ensemble member -# to the next depending on whether the member is time-lagged. 
That would -# cause confusion, so here, we always use lead values with zero lead -# corresponding to the nominal cdate. -# -FCST_VAR1_NAME = {{fieldname_in_fcst_input}} -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = {{field_thresholds}} -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - convert(x) = x * 3.28084 * 0.001; - cnt_thresh = [ >0 ]; - cnt_logic = UNION; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = convert(x) = x * 3280.84 * 0.001; - censor_thresh = [<=-9.84252,eq-3.28084]; - censor_val = [-9999,-16.4042]; - cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 1,3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# 
NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensmean.conf b/parm/metplus/GridStat_ensmean.conf new file mode 100644 index 0000000000..4b8c71ddab --- /dev/null +++ b/parm/metplus/GridStat_ensmean.conf @@ -0,0 +1,662 @@ +# Ensemble mean {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). 
+# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{fhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. +# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. +# +{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped + +# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary +# See MET User's Guide for more information +{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = FCST +{{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5 +{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BUDGET +{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2 +{{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE + +#{{METPLUS_TOOL_NAME}}_INTERP_FIELD = BOTH +#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0 +#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE +#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST +#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1 + +#{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG = + +# +# Name to identify model (forecast) data in output. +# +# The variable MODEL is recorded in the stat files, and the data in +# these files is then plotted (e.g. using METViewer). Here, we add a +# suffix to MODEL that identifies the data as that for the ensemble +# mean. This makes it easier to identify each curve. +# +MODEL = {{vx_fcst_model_name}}_ensmean +# +# Name to identify observation data in output. +# +OBTYPE = {{obtype}} +# +# Value to enter under the DESC column in the output stat file. +# +{{METPLUS_TOOL_NAME}}_DESC = NA +# +# Overrides of MET configuration defaults. +# +{{METPLUS_TOOL_NAME}}_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; +# +# List of forecast and corresponding observation fields to process. +# +{#- +Import the file containing jinja macros. +#} +{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %} + +{#- +Set the probabilistic threshold to be used for the forecast field. If +necessary, this can be changed to be an input parameter in the calling +script instead of a hard-coded value as below. 
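As an illustrative aside (a minimal sketch, not the SRW App's actual rendering driver): if the hard-coded set statement below were removed, a Python caller could supply thresh_fcst_prob at render time. Every name and value in this sketch is an assumption for illustration only.

    from jinja2 import Environment, StrictUndefined

    # Stand-in template using the same placeholder style as this file; the
    # real template is parm/metplus/GridStat_ensmean.conf.
    template_text = (
        "FCST_{{METPLUS_TOOL_NAME}}_PROB_THRESH = {{thresh_fcst_prob}}\n"
        "MODEL = {{vx_fcst_model_name}}_ensmean\n"
    )

    env = Environment(undefined=StrictUndefined)  # fail loudly on missing variables
    rendered = env.from_string(template_text).render(
        METPLUS_TOOL_NAME="GRID_STAT",     # upper-case tool name used in option keys
        thresh_fcst_prob="==0.1",          # supplied by the caller instead of hard-coded
        vx_fcst_model_name="FV3_GFS_v16",  # hypothetical model name
    )
    print(rendered)
    # FCST_GRID_STAT_PROB_THRESH = ==0.1
    # MODEL = FV3_GFS_v16_ensmean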
+#}
+{%- set thresh_fcst_prob = '==0.1' %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja
+scoping rules). Define such variables.
+#}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set indx_level_fcst = '' %}
+
+{%- set valid_threshes_fcst = [] %}
+{%- set valid_threshes_obs = [] %}
+{%- set threshes_fcst = '' %}
+{%- set threshes_obs = '' %}
+{%- set indx_input_thresh_fcst = '' %}
+
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+{%- set tmp = '' %}
+{%- set error_msg = '' %}
+
+{#-
+Make sure that the set of field groups for forecasts and observations
+are identical.
+#}
+{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
+{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
+{%- if (fgs_fcst != fgs_obs) %}
+  {%- set error_msg = '\n' ~
+'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
+'to that for observations (fgs_obs) but isn\'t:\n' ~
+'  fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
+'  fgs_obs = ' ~ fgs_obs %}
+  {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- endif %}
+
+{#-
+Extract the lists of forecast and observation dictionaries containing
+the valid fields, levels, and thresholds corresponding to the specified
+field group (input_field_group). Note that it would be simpler to have
+these be just dictionaries in which the keys are the field names (instead
+of them being LISTS of dictionaries in which each dictionary contains a
+single key that is the field name), but that approach cannot be used here
+because it is possible for field names to be repeated (for both forecasts
+and observations). For example, in the observations, the field name
+'PRWE' appears more than once, each time with a different threshold, and
+the combination of name and threshold is what constitutes a unique field,
+not just the name by itself.
+#}
+{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
+{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+
+{#-
+Reset the specified forecast level so that if it happens to be an
+accumulation (e.g. 'A03'), the leading zeros in front of the hour are
+stripped out (e.g. reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Ensure that the specified input forecast level(s) (input_level_fcst) and
+threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
+set(s) of valid forecast levels and thresholds, respectively, specified
+in fields_levels_threshes_fcst.
+#}
+{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
+{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+
+{#-
+Some fields in the specified field group (input_field_group) may need to
+be excluded from the METplus config file because calculating means for
+them doesn't make sense. List these (for each input_field_group) in the
+following dictionary.
+#}
+{%- set fields_fcst_to_exclude_by_field_group =
+    {'APCP': [],
+     'ASNOW': [],
+     'REFC': [],
+     'RETOP': [],
+     'ADPSFC': ['TCDC', 'VIS', 'HGT'],
+     'ADPUPA': []} %}
+{%- set fields_fcst_to_exclude = fields_fcst_to_exclude_by_field_group[input_field_group] %}
+
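To make the template logic above easier to follow, here is a rough Python equivalent of the field-group consistency check and the accumulation zero-pad stripping. The shape of vx_config_dict and the regex are illustrative assumptions: the real data comes from the SRW verification configuration, and the real stripping logic lives in metplus_macros.jinja, which is not shown in this patch.

    import re

    # Illustrative shape only: lists of single-key dicts, as the comment above
    # explains, because a field name (e.g. 'PRWE') can repeat.
    vx_config_dict = {
        "fcst": {"APCP": [{"APCP": {"A1": ["gt0.0", "ge0.254"]}}]},
        "obs":  {"APCP": [{"APCP": {"A1": ["gt0.0", "ge0.254"]}}]},
    }

    # Field-group consistency check, mirroring the template's if-block.
    fgs_fcst = list(vx_config_dict["fcst"].keys())
    fgs_obs = list(vx_config_dict["obs"].keys())
    if fgs_fcst != fgs_obs:
        raise ValueError(f"field groups differ: {fgs_fcst} vs {fgs_obs}")

    # Zero-pad stripping as described above ('A03' -> 'A3'); a sketch of what
    # metplus_macros.get_accumulation_no_zero_pad is documented to do.
    def get_accumulation_no_zero_pad(level: str) -> str:
        return re.sub(r"^A0+(?=\d)", "A", level)

    assert get_accumulation_no_zero_pad("A03") == "A3"
    assert get_accumulation_no_zero_pad("A24") == "A24"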
+{#-
+For convenience, create lists of valid forecast and observation field
+names.
+#}
+{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
+{%- set valid_fields_fcst = [] %}
+{%- for i in range(0,num_valid_fields_fcst) %}
+  {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
+  {%- set tmp = valid_fields_fcst.append(field) %}
+{%- endfor %}
+
+{%- set valid_fields_obs = [] %}
+{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
+{%- for i in range(0,num_valid_fields_obs) %}
+  {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
+  {%- set tmp = valid_fields_obs.append(field) %}
+{%- endfor %}
+
+{#-
+Ensure that the number of valid fields for forecasts is equal to that
+for the observations.
+#}
+{%- set num_valid_fields = 0 %}
+{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
+  {%- set error_msg = '\n' ~
+'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
+'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
+'but isn\'t:\n' ~
+'  num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
+'  num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
+'The lists of valid forecast and observation fields are:\n' ~
+'  valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
+'  valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
+  {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- else %}
+  {%- set num_valid_fields = num_valid_fields_fcst %}
+{%- endif %}
+
+{#-
+Loop over the valid fields and set field names, levels, thresholds, and/
+or options for each field, both for forecasts and for observations, in
+the METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+
+{%- for i in range(0,num_valid_fields) if valid_fields_fcst[i] not in fields_fcst_to_exclude %}
+
+  {%- set field_fcst = valid_fields_fcst[i] %}
+  {%- set field_obs = valid_fields_obs[i] %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field. Then check that the number of valid levels for
+forecasts is the same as that for observations.
+#}
+  {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
+  {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
+
+{#-
+Extract dictionary of valid forecast levels (the dictionary keys) and
+corresponding lists of valid thresholds (the values) for each level.
+Then loop over these levels and corresponding lists of thresholds to set
+both the forecast and observation field names, levels, thresholds, and/or
+options.
+#}
+  {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
+  {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+
+    {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
+{#-
+Increment the METplus variable counter.
+#}
+    {%- set ns.var_count = ns.var_count+1 %}
+
+{#-
+Set forecast field name. Note that this has to exactly match the name
+of the field in the input forecast file(s).
+
+The input forecast files are generated by the MET/METplus GenEnsProd
+tool. That tool adds the field's level to the variable names in its
+output file to ensure that all variables in the file have distinct names.
+For example, if the same field, say APCP, is output at two different
+levels, say at A3 and A6 (for APCP, "levels" are really accumulation
+periods), there need to be two variables in the output file, and they
+obviously can't both be named "APCP", so GenEnsProd names one "APCP_A3"
+and the other "APCP_A6".
Here, the level is stored in the variable +level_fcst and, below, is included in the name of the forecast field. + +For accumulated fields, the field name in the input forecast file contains +TWO references to the accumulation period. The first is the level of the +forecast field added by GenEnsProd as described above. The second is +another reference to this same level (accumulation period) but added by +the MET/METplus's PcpCombine tool (whose output file is the input into +GenEnsProd). PcpCombine adds this reference to the level (really the +accumulation period) to the field's name for the same reason that +GenEnsProd does, i.e. to ensure that the names of variables in the output +file are distinct. Here, this accumulation period is stored in the +variable accum_hh. Thus, for accumulated fields, below we add both +accum_hh and level_fcst to the field name to get an exact field name +match. +#} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}}_{{level_fcst}}_ENS_MEAN + {%- else %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{level_fcst}}_ENS_MEAN + {%- endif %} + +{#- +Set forecast field level. +#} +FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}} + +{#- +Set forecast field threshold(s). Note that no forecast thresholds are +included in the METplus configuration file if input_thresh_fcst is set +to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_thresh_fcst is set to 'all', set the list of forecast thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_fcst = valid_threshes_fcst %} +{#- +If input_thresh_fcst is set to a specific value: + 1) Ensure that input_thresh_fcst exists in the list of valid forecast + thresholds. + 2) Get the index of input_thresh_fcst in the list of valid forecast + thresholds. This will be needed later below when setting the + observation threshold(s). + 3) Use this index to set the forecast threshold to a one-element list + containing the specified forecast threshold. +#} + {%- else %} + + {%- if input_thresh_fcst not in valid_threshes_fcst %} + {%- set error_msg = '\n' ~ +'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~ +'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~ +'of valid forecast thresholds (valid_threshes_fcst):\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' level_fcst = ' ~ level_fcst ~ '\n' ~ +' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~ +' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} + {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} + + {%- endif %} +{#- +If threshes_fcst has been reset to something other than its default +value of an empty list, then set the forecast thresholds in the METplus +configuration file because that implies threshes_fcst was set above to +a non-empty value. Then reset threshes_fcst to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_fcst != []) %} +FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}} + {%- endif %} + {%- set threshes_fcst = [] %} + + {%- endif %} + +{#- +Set forecast field options. 
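The naming rule just described can be summarized in a few lines of Python. This is a sketch for illustration only; the authoritative logic is the Jinja branch above, and the example values are drawn from the APCP configuration elsewhere in this patch, with the REFC case following the template's else-branch.

    def ensmean_fcst_var_name(field, field_group, level, accum_hh=""):
        """Mirror of the template's FCST_VARn_NAME rule for GenEnsProd output.

        Accumulated groups (APCP, ASNOW) carry both the PcpCombine accumulation
        tag (accum_hh, zero-padded) and the GenEnsProd level tag (level, no
        zero padding); all other groups carry only the level tag.
        """
        if field_group in ("APCP", "ASNOW"):
            return f"{field}_{accum_hh}_{level}_ENS_MEAN"
        return f"{field}_{level}_ENS_MEAN"

    # Example values taken from the APCP and REFC configs in this patch:
    assert ensmean_fcst_var_name("APCP", "APCP", "A6", accum_hh="06") == "APCP_06_A6_ENS_MEAN"
    assert ensmean_fcst_var_name("REFC", "REFC", "L0") == "REFC_L0_ENS_MEAN"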
+#} + {%- set opts_indent_len = 20 %} + {%- if (ns.var_count > 9) and (ns.var_count <= 99) %} + {%- set opts_indent_len = opts_indent_len + 1 %} + {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %} + {%- set opts_indent_len = opts_indent_len + 2 %} + {%- elif (ns.var_count > 999) %} + {%- set opts_indent_len = opts_indent_len + 3 %} + {%- endif %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'ADPUPA' %} + + {%- if field_fcst == 'CAPE' %} +FCST_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; + {%- endif %} + + {%- endif %} + +{#- +Set observation field name. Note that this has to exactly match the name +of the field in the input observation file. + +For accumulated fields, the input observation file is generated by MET's +PcpCombine tool. In that file, the field name consists of the observation +field name here (field_obs) with the accumulation period appended to it +(separated by an underscore), so we must do the same here to get an exact +match. + +Note: +Turns out for ASNOW, PcpCombine is not run for obs, so we exclude that +from the "if" clause here (so it goes into the "else"). For workflow +behavior uniformity between APCP and ASNOW, consider running PcpCombine +for ASNOW observations as well (just as it's run for APCP observations). + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +#} + {%- if (input_field_group in ['APCP']) %} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}_{{accum_hh}} + {%- else %} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}} + {%- endif %} + +{#- +Set observation field level. +#} + {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %} + {%- set level_obs = valid_levels_obs[indx_level_fcst] %} +OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}} + +{#- +Set observation field threshold(s). Note that no observation thresholds +are included in the METplus configuration file if input_thresh_fcst is +set to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +Set the list of valid observation thresholds to the one corresponding to +the current observation level (level_obs). +#} + {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %} +{#- +If input_thresh_fcst is set to 'all', set the list of observation thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_obs = valid_threshes_obs %} +{#- +If input_thresh_fcst is set to a specific forecast threshold, then the +observation threshold is given by the element in the list of valid +observation thresholds that has the same index as that of input_thresh_fcst +in the list of valid forecast thresholds. +#} + {%- else %} + {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %} + {%- endif %} +{#- +If threshes_obs has been reset to something other than its default value +of an empty list, then set the observation thresholds in the METplus +configuration file because that implies threshes_obs was set above to +a non-empty value. Then reset threshes_obs to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_obs != []) %} +OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}} + {%- endif %} + {%- set threshes_obs = [] %} + + {%- endif %} + +{#- +Set observation field options. 
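The forecast/observation threshold handling implemented above is index-based: a specific input_thresh_fcst selects the observation threshold at the same position in the list of valid observation thresholds. A compact Python sketch of that rule follows; the threshold strings are hypothetical examples, not values taken from the verification configuration.

    def pair_thresholds(input_thresh_fcst, valid_threshes_fcst, valid_threshes_obs):
        """Sketch of the template's forecast/observation threshold pairing.

        'none' -> no thresholds are written; 'all' -> the full valid lists
        are used; a specific forecast threshold -> that threshold plus the
        observation threshold at the SAME index in the valid-obs list.
        """
        if input_thresh_fcst == "none":
            return [], []
        if input_thresh_fcst == "all":
            return list(valid_threshes_fcst), list(valid_threshes_obs)
        if input_thresh_fcst not in valid_threshes_fcst:
            raise ValueError(f"invalid forecast threshold: {input_thresh_fcst!r}")
        idx = valid_threshes_fcst.index(input_thresh_fcst)
        return [input_thresh_fcst], [valid_threshes_obs[idx]]

    # Usage with hypothetical threshold lists:
    fcst, obs = pair_thresholds("ge0.254", ["gt0.0", "ge0.254"], ["gt0.0", "ge0.254"])
    assert (fcst, obs) == (["ge0.254"], ["ge0.254"])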
+#} + {%- set opts_indent_len = opts_indent_len - 1 %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'ASNOW' %} + + {%- if field_obs == 'ASNOW' %} +OBS_VAR{{ns.var_count}}_OPTIONS = convert(x) = 100.0*x; + {%- endif %} + + {%- elif input_field_group == 'ADPUPA' %} + + {%- if field_obs == 'CAPE' %} +OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; +{{opts_indent}}cnt_logic = UNION; + {%- elif field_obs == 'PBL' %} +OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE"; + {%- endif %} + + {%- endif %} + +{#- +Print out a newline to separate the settings for the current field (both +forecast and observation settings) from those for the next field. +#} + {{- '\n' }} + + {%- endif %} + + {%- endfor %} +{%- endfor %} +# +# Forecast data time window(s). +# +FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = 0 +FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = 0 +# +# Observation data time window(s). +# +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = 0 +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = 0 + +# MET {{MetplusToolName}} neighborhood values +# See the MET User's Guide {{MetplusToolName}} section for more information +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_FIELD = BOTH + +# width value passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = 3,5,7 + +# shape value passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_SHAPE = SQUARE + +# cov thresh list passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_COV_THRESH = >=0.5 + +# Set to true to run {{MetplusToolName}} separately for each field specified +# Set to false to create one run of {{MetplusToolName}} per run time that +# includes all fields specified. +{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False +# +# Set to true if forecast data is probabilistic. 
+# +FCST_IS_PROB = False +# +# Only used if FCST_IS_PROB is true - sets probabilistic threshold +# +FCST_{{METPLUS_TOOL_NAME}}_PROB_THRESH = ==0.1 + +{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} + +# Climatology data +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_DAY_INTERVAL = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_HOUR_INTERVAL = + +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_DAY_INTERVAL = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_HOUR_INTERVAL = + +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True + +{{METPLUS_TOOL_NAME}}_MASK_GRID = + +# Statistical output types +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = BOTH +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTS = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_GRAD = BOTH +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_DMAP = NONE + +# NetCDF matched pairs output file +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_VAR_NAME = +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_LATLON = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_RAW = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DIFF = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO = FALSE +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO_CDP = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_WEIGHT = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_NBRHD = FALSE +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_FOURIER = FALSE +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_GRADIENT = FALSE +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_APPLY_MASK = FALSE + +# End of [config] section and start of [dir] section. +[dir] +# +# Directory containing observation input to {{MetplusToolName}}. +# +OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}} +# +# Directory containing forecast input to {{MetplusToolName}}. 
+# +FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}} +# +# Directory containing climatology mean input to {{MetplusToolName}}. Not used in +# this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR = +# +# Directory containing climatology mean input to {{MetplusToolName}}. Not used in +# this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR = +# +# Directory in which to write output from {{MetplusToolName}}. +# +# OUTPUT_BASE apparently has to be set to something; it cannot be left +# to its default value. But it is not explicitly used elsewhere in this +# configuration file. +# +OUTPUT_BASE = {{output_base}} +{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}} +# +# Directory for staging data. +# +STAGING_DIR = {{staging_dir}} + +# End of [dir] section and start of [filename_templates] section. +[filename_templates] +# +# Template for observation input to {{MetplusToolName}} relative to +# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR. +# +OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}} +# +# Template for forecast input to {{MetplusToolName}} relative to +# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR. +# +FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}} +# +# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR. +# +{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE = +# +# Template for climatology input to {{MetplusToolName}} relative to +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE = +# +# Template for climatology input to {{MetplusToolName}} relative to +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE = +# +# Variable used to specify one or more verification mask files for +# {{MetplusToolName}}. Not used for this example. +# +{{METPLUS_TOOL_NAME}}_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensmean_APCP.conf b/parm/metplus/GridStat_ensmean_APCP.conf deleted file mode 100644 index 6d3956c8e6..0000000000 --- a/parm/metplus/GridStat_ensmean_APCP.conf +++ /dev/null @@ -1,282 +0,0 @@ -# Ensemble mean GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. 
-INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = - -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_MEAN -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds}} - -# -# Forecast data time window(s). -# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). 
-# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. 
-# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensmean_ASNOW.conf b/parm/metplus/GridStat_ensmean_ASNOW.conf deleted file mode 100644 index 6fb8951a3f..0000000000 --- a/parm/metplus/GridStat_ensmean_ASNOW.conf +++ /dev/null @@ -1,287 +0,0 @@ -# Ensemble mean GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. 
-# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = - -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_MEAN -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = convert(x) = 100.0*x; -# -# Forecast data time window(s). -# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). 
-# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. 
-# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensmean_REFC.conf b/parm/metplus/GridStat_ensmean_REFC.conf deleted file mode 100644 index 451c82dfd5..0000000000 --- a/parm/metplus/GridStat_ensmean_REFC.conf +++ /dev/null @@ -1,313 +0,0 @@ -# Ensemble mean GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. 
-# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# String to search for in the foreast input files for forecast variable -# 1. -# -# Note: -# This is the name of the field in the NetCDF file(s) created by MET's -# gen_ens_prod tool. This tool reads in the grib2 file(s) (in this case -# of forecasts) and outputs NetCDF file(s) in which the array names -# consist of the value of fieldname_in_met_output plus a suffix that -# specifies additional properties of the data in the array such as the -# level, the type of statistic, etc. In this case, this suffix is -# "_L0_ENS_MEAN". Thus, below, FCST_VAR1_NAME must be set to the value -# of fieldname_in_met_output with "_L0_ENS_MEAN" appended to it. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_L0_ENS_MEAN -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = {{field_thresholds}} -# -# String to search for in the observation input files for observation -# variable 1. -# -# Note: -# This is the name of the field in the grib2 observation file. 
Thus, -# it should not be set to {{fieldname_in_met_output}} because the -# value of fieldname_in_met_output is in general not the same as the -# name of the field in the grib2 observation file (although it can be -# for certain fields). If you do and it doesn't match, you may get an -# error like this from METplus: -# ERROR : VarInfoGrib2::set_dict() -> unrecognized GRIB2 field abbreviation ... -# -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file 
-#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensmean_RETOP.conf b/parm/metplus/GridStat_ensmean_RETOP.conf deleted file mode 100644 index a881ed3ab5..0000000000 --- a/parm/metplus/GridStat_ensmean_RETOP.conf +++ /dev/null @@ -1,315 +0,0 @@ -# Ensemble mean GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. 
-INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# String to search for in the foreast input files for forecast variable -# 1. -# -# Note: -# This is the name of the field in the NetCDF file(s) created by MET's -# gen_ens_prod tool. This tool reads in the grib2 file(s) (in this case -# of forecasts) and outputs NetCDF file(s) in which the array names -# consist of the value of fieldname_in_met_output plus a suffix that -# specifies additional properties of the data in the array such as the -# level, the type of statistic, etc. In this case, this suffix is -# "_L0_ENS_MEAN". Thus, below, FCST_VAR1_NAME must be set to the value -# of fieldname_in_met_output with "_L0_ENS_MEAN" appended to it. 
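As a concrete illustration of the naming rule in the note above, the ensemble-mean variable name is just the base field name with the level and the statistic suffix appended (a minimal Python sketch; the field name shown is illustrative):

    # Naming rule described in the note above: gen_ens_prod appends the
    # level plus "_ENS_MEAN" to the value of fieldname_in_met_output.
    def ens_mean_var_name(fieldname_in_met_output: str, level: str = "L0") -> str:
        # e.g. ens_mean_var_name("RETOP") -> "RETOP_L0_ENS_MEAN"
        return f"{fieldname_in_met_output}_{level}_ENS_MEAN"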
-# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_L0_ENS_MEAN -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = {{field_thresholds}} -FCST_VAR1_OPTIONS = convert(x) = x * 3.28084 * 0.001; -# -# String to search for in the observation input files for observation -# variable 1. -# -# Note: -# This is the name of the field in the grib2 observation file. Thus, -# it should not be set to {{fieldname_in_met_output}} because the -# value of fieldname_in_met_output is in general not the same as the -# name of the field in the grib2 observation file (although it can be -# for certain fields). If you do and it doesn't match, you may get an -# error like this from METplus: -# ERROR : VarInfoGrib2::set_dict() -> unrecognized GRIB2 field abbreviation ... -# -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. 
-# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. 
-# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensprob.conf b/parm/metplus/GridStat_ensprob.conf new file mode 100644 index 0000000000..6a4873e446 --- /dev/null +++ b/parm/metplus/GridStat_ensprob.conf @@ -0,0 +1,675 @@ +# Ensemble probabilistic {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{fhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. +# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. +# +{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped + +# grid to remap data. 
Value is set as the 'to_grid' variable in the 'regrid' dictionary +# See MET User's Guide for more information +{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = FCST +{{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5 +{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BUDGET +{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2 +{{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE + +{%- if input_field_group in ['APCP', 'ASNOW'] %} + +#{{METPLUS_TOOL_NAME}}_INTERP_FIELD = BOTH +#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0 +#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE +#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST +#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1 + +#{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG = +{%- elif input_field_group in ['REFC', 'RETOP'] %} + +{{METPLUS_TOOL_NAME}}_INTERP_FIELD = NONE +{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0 +{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE +{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST +{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1 + +{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG = NONE +{%- endif %} +# +# Name to identify model (forecast) data in output. +# +# The variable MODEL is recorded in the stat files, and the data in +# these files is then plotted (e.g. using METViewer). Here, we add a +# suffix to MODEL that identifies the data as ensemble-probabilistic. +# This makes it easier to identify each curve. +# +MODEL = {{vx_fcst_model_name}}_ensprob +# +# Name to identify observation data in output. +# +OBTYPE = {{obtype}} +# +# Value to enter under the DESC column in the output stat file. +# +{{METPLUS_TOOL_NAME}}_DESC = NA +# +# Overrides of MET configuration defaults. +# +{{METPLUS_TOOL_NAME}}_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; +# +# List of forecast and corresponding observation fields to process. +# +{#- +Import the file containing jinja macros. +#} +{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %} + +{#- +Set the probabilistic threshold to be used for the forecast field. If +necessary, this can be changed to be an input parameter in the calling +script instead of a hard-coded value as below. +#} +{%- set thresh_fcst_prob = '==0.1' %} + +{#- +Jinja requires certain variables to be defined globally within the template +before they can be used in if-statements and other scopes (see Jinja +scoping rules). Define such variables. +#} +{%- set level_fcst = '' %} +{%- set level_obs = '' %} +{%- set indx_level_fcst = '' %} + +{%- set valid_threshes_fcst = [] %} +{%- set valid_threshes_obs = [] %} +{%- set thresh_fcst = '' %} +{%- set thresh_obs = '' %} +{%- set indx_thresh_fcst = '' %} +{%- set thresh_fcst_and_or = '' %} + +{%- set opts_indent = '' %} +{%- set opts_indent_len = '' %} +{%- set tmp = '' %} +{%- set error_msg = '' %} + +{#- +Make sure that the set of field groups for forecasts and observations +are identical. 
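Note that this new file is a Jinja template rather than a literal METplus conf: the placeholders and the metplus_macros import above are resolved at render time. As a rough sketch (not the App's actual driver script), rendering it with the jinja2 package might look like this; the values passed to render() are illustrative:

    import jinja2

    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader("."),
        undefined=jinja2.StrictUndefined,  # fail fast on any missing template variable
    )
    template = env.get_template("parm/metplus/GridStat_ensprob.conf")
    rendered = template.render(
        metplus_templates_dir="parm/metplus",  # consumed by the import above
        input_field_group="APCP",              # one of APCP, ASNOW, REFC, RETOP
        input_level_fcst="all",
        input_thresh_fcst="all",
        # cdate, fhr_list, vx_config_dict, METPLUS_TOOL_NAME, etc. must also
        # be supplied; with StrictUndefined the render fails until they are.
    )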
+#}
+{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
+{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
+{%- if (fgs_fcst != fgs_obs) %}
+  {%- set error_msg = '\n' ~
+'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
+'to that for observations (fgs_obs) but isn\'t:\n' ~
+'  fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
+'  fgs_obs = ' ~ fgs_obs %}
+  {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- endif %}
+
+{#-
+Extract the lists of forecast and observation dictionaries containing
+the valid fields, levels, and thresholds corresponding to the specified
+field group (input_field_group). Note that it would be simpler to have
+these be just dictionaries in which the keys are the field names (instead
+of them being LISTS of dictionaries in which each dictionary contains a
+single key that is the field name), but that approach cannot be used here
+because it is possible for field names to be repeated (for both forecasts
+and observations). For example, in the observations, the field name
+'PRWE' appears more than once, each time with a different threshold, and
+the combination of name and threshold is what constitutes a unique field,
+not just the name by itself.
+#}
+{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
+{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+
+{#-
+Reset the specified forecast level so that if it happens to be an
+accumulation (e.g. 'A03'), the leading zeros in front of the hour are
+stripped out (e.g. reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Ensure that the specified input forecast level(s) (input_level_fcst) and
+threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
+set(s) of valid forecast levels and thresholds, respectively, specified
+in fields_levels_threshes_fcst.
+#}
+{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
+{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+
+{#-
+For convenience, create lists of valid forecast and observation field
+names.
+#}
+{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
+{%- set valid_fields_fcst = [] %}
+{%- for i in range(0,num_valid_fields_fcst) %}
+  {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
+  {%- set tmp = valid_fields_fcst.append(field) %}
+{%- endfor %}
+
+{%- set valid_fields_obs = [] %}
+{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
+{%- for i in range(0,num_valid_fields_obs) %}
+  {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
+  {%- set tmp = valid_fields_obs.append(field) %}
+{%- endfor %}
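In Python terms, the list construction above behaves like the following (data made up to mirror the 'PRWE' example in the comment):

    # Each list entry is a single-key dict, {field_name: {level: [threshes]}},
    # so the same field name can legitimately appear more than once.
    fields_levels_threshes_obs = [
        {"PRWE": {"Z0": ["ge170&&le180"]}},  # illustrative level/threshold
        {"PRWE": {"Z0": ["ge255"]}},         # same name, different threshold
    ]
    # The Jinja "keys()|list|join('')" idiom on a one-key dict just yields
    # that key; duplicates are preserved, unlike with a plain dict.
    valid_fields_obs = ["".join(d.keys()) for d in fields_levels_threshes_obs]
    assert valid_fields_obs == ["PRWE", "PRWE"]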
+
+{#-
+Ensure that the number of valid fields for forecasts is equal to that
+for the observations.
+#}
+{%- set num_valid_fields = 0 %}
+{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
+  {%- set error_msg = '\n' ~
+'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
+'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
+'but isn\'t:\n' ~
+'  num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
+'  num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
+'The lists of valid forecast and observation fields are:\n' ~
+'  valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
+'  valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
+  {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- else %}
+  {%- set num_valid_fields = num_valid_fields_fcst %}
+{%- endif %}
+
+{#-
+Loop over the valid fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in
+the METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+
+{#-
+Loop over each field twice, the first time treating the forecast field as
+probabilistic and the second time as a scalar.
+#}
+{%- for treat_fcst_as_prob in [True, False] %}
+
+  {%- for i in range(0,num_valid_fields) %}
+
+{#-
+Add a comment depending on whether or not the field is being treated
+probabilistically.
+#}
+    {%- if treat_fcst_as_prob %}
+# FREQ
+# Process as probability
+#
+    {%- else %}
+#
+# Process as scalars for neighborhood methods
+## Note that the number of forecast and obs thresholds must match
+## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;"
+#
+    {%- endif %}
+
+    {%- set field_fcst = valid_fields_fcst[i] %}
+    {%- set field_obs = valid_fields_obs[i] %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field. Then check that the number of valid levels for
+forecasts is the same as that for observations.
+#}
+    {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
+    {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
+
+{#-
+Extract dictionary of valid forecast levels (the dictionary keys) and
+corresponding lists of valid thresholds (the values) for each level.
+Then loop over these levels and corresponding lists of thresholds to set
+both the forecast and observation field names, levels, thresholds, and/or
+options.
+#}
+    {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
+    {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+
+      {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
+
+        {%- for thresh_fcst in valid_threshes_fcst %}
+
+          {%- if (input_thresh_fcst == 'all') or (input_thresh_fcst == thresh_fcst) %}
+{#-
+Increment the METplus variable counter.
+#}
+            {%- set ns.var_count = ns.var_count+1 %}
+
+{#-
+Set forecast field name. Note that this has to exactly match the name
+of the field in the input forecast file(s).
+
+The input forecast files are generated by the MET/METplus GenEnsProd
+tool. That tool adds the field's level to the variable names in its
+output file to ensure that all variables in the file have distinct names.
+For example, if the same field, say APCP, is output at two different
+levels, say at A3 and A6 (for APCP, "levels" are really accumulation
+periods), there need to be two variables in the output file, and they
+obviously can't both be named "APCP", so GenEnsProd names one "APCP_A3"
+and the other "APCP_A6".
+Here, the level is stored in the variable
+level_fcst and, below, is included in the name of the forecast field.
+
+For accumulated fields, the field name in the input forecast file contains
+TWO references to the accumulation period. The first is the level of the
+forecast field added by GenEnsProd as described above. The second is
+another reference to this same level (accumulation period) but added by
+MET/METplus's PcpCombine tool (whose output file is the input into
+GenEnsProd). PcpCombine adds this reference to the level (really the
+accumulation period) to the field's name for the same reason that
+GenEnsProd does, i.e. to ensure that the names of variables in the output
+file are distinct. Here, this accumulation period is stored in the
+variable accum_hh. Thus, for accumulated fields, below we add both
+accum_hh and level_fcst to the field name to get an exact field name
+match.
+#}
+            {%- set thresh_fcst_and_or = thresh_fcst|replace("&&", ".and.") %}
+            {%- set thresh_fcst_and_or = thresh_fcst_and_or|replace("||", ".or.") %}
+            {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}}_{{level_fcst}}_ENS_FREQ_{{thresh_fcst_and_or}}
+            {%- else %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{level_fcst}}_ENS_FREQ_{{thresh_fcst_and_or}}
+            {%- endif %}
+
+{#-
+Set forecast field level.
+#}
+FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}}
+
+{#-
+Set forecast field threshold.
+Note that since the forecast field being read in is actually a field of
+probabilities, we set the forecast threshold to a probabilistic one
+(thresh_fcst_prob) and not to the physical threshold (thresh_fcst) in
+the dictionary of forecast field names, levels, and thresholds that we
+are looping over.
+#}
+FCST_VAR{{ns.var_count}}_THRESH = {{thresh_fcst_prob}}
+
+{#-
+Set forecast field options.
+#}
+            {%- set opts_indent_len = 20 %}
+            {%- if (ns.var_count > 9) and (ns.var_count <= 99) %}
+              {%- set opts_indent_len = opts_indent_len + 1 %}
+            {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %}
+              {%- set opts_indent_len = opts_indent_len + 2 %}
+            {%- elif (ns.var_count > 999) %}
+              {%- set opts_indent_len = opts_indent_len + 3 %}
+            {%- endif %}
+            {%- set opts_indent = ' '*opts_indent_len %}
+
+            {%- if not treat_fcst_as_prob %}
+FCST_VAR{{ns.var_count}}_OPTIONS = prob = FALSE;
+            {%- endif %}
+
+{#-
+Set observation field name. Note that this has to exactly match the name
+of the field in the input observation file.
+
+For accumulated fields, the input observation file is generated by MET's
+PcpCombine tool. In that file, the field name consists of the observation
+field name here (field_obs) with the accumulation period appended to it
+(separated by an underscore), so we must do the same here to get an exact
+match.
+
+Note:
+It turns out that for ASNOW, PcpCombine is not run for obs, so we exclude
+that from the "if" clause here (so it goes into the "else"). For workflow
+behavior uniformity between APCP and ASNOW, consider running PcpCombine
+for ASNOW observations as well (just as it's run for APCP observations).
+  {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+#}
+            {%- if (input_field_group in ['APCP']) %}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}_{{accum_hh}}
+            {%- else %}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}
+            {%- endif %}
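With illustrative values plugged in, the name assembly above produces strings like the following (Python sketch; the compound threshold is made up to show the '&&' to '.and.' translation):

    field_fcst, accum_hh, level_fcst = "APCP", "03", "A3"  # illustrative
    thresh_fcst = "gt0.0&&lt2.54"                          # illustrative
    thresh_fcst_and_or = thresh_fcst.replace("&&", ".and.").replace("||", ".or.")
    # Accumulated fields carry both accum_hh (added by PcpCombine) and
    # level_fcst (added by GenEnsProd) in the name:
    fcst_var_name = f"{field_fcst}_{accum_hh}_{level_fcst}_ENS_FREQ_{thresh_fcst_and_or}"
    assert fcst_var_name == "APCP_03_A3_ENS_FREQ_gt0.0.and.lt2.54"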
+
+{#-
+Set observation field level.
+#}
+            {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %}
+            {%- set level_obs = valid_levels_obs[indx_level_fcst] %}
+OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}}
+
+{#-
+Set observation field threshold. Note that no observation thresholds
+are included in the METplus configuration file if input_thresh_fcst is
+set to 'none'.
+#}
+            {%- if (input_thresh_fcst != 'none') %}
+{#-
+Set the list of valid observation thresholds to the one corresponding to
+the current observation level (level_obs).
+#}
+              {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %}
+{#-
+Set the observation threshold. This is given by the element in the list
+of valid observation thresholds that has the same index as that of the
+current forecast threshold (thresh_fcst) in the list of valid forecast
+thresholds.
+#}
+              {%- set indx_thresh_fcst = valid_threshes_fcst.index(thresh_fcst) %}
+              {%- set thresh_obs = valid_threshes_obs[indx_thresh_fcst] %}
+OBS_VAR{{ns.var_count}}_THRESH = {{thresh_obs}}
+            {%- endif %}
+
+{#-
+Set observation field options.
+#}
+            {%- set opts_indent_len = opts_indent_len - 1 %}
+            {%- set opts_indent = ' '*opts_indent_len %}
+
+            {%- if input_field_group == 'APCP' %}
+
+              {%- if field_obs == 'APCP' %}
+                {%- if not treat_fcst_as_prob %}
+OBS_VAR{{ns.var_count}}_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
+                {%- endif %}
+              {%- endif %}
+
+            {%- elif input_field_group == 'ASNOW' %}
+
+              {%- if field_obs == 'ASNOW' %}
+                {%- if treat_fcst_as_prob %}
+OBS_VAR{{ns.var_count}}_OPTIONS = convert(x) = 100.0*x;
+                {%- else %}
+OBS_VAR{{ns.var_count}}_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; };
+{{opts_indent}}convert(x) = 100.0*x;
+                {%- endif %}
+              {%- endif %}
+
+            {%- elif input_field_group == 'REFC' %}
+
+              {%- if field_obs == 'MergedReflectivityQCComposite' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20;
+{{opts_indent}}censor_val = -20.0;
+{{opts_indent}}cnt_thresh = [ >15 ];
+{{opts_indent}}cnt_logic = UNION;
+                {%- if not treat_fcst_as_prob %}
+{{opts_indent}}nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
+                {%- endif %}
+              {%- endif %}
+
+            {%- elif input_field_group == 'RETOP' %}
+
+              {%- if field_obs == 'EchoTop18' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20.0;
+{{opts_indent}}censor_val = -20.0;
+{{opts_indent}}cnt_thresh = [ >15 ];
+{{opts_indent}}cnt_logic = UNION;
+{{opts_indent}}convert(x) = x * 3280.84 * 0.001;
+                {%- if not treat_fcst_as_prob %}
+{{opts_indent}}nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }
+                {%- endif %}
+              {%- endif %}
+
+            {%- endif %}
+{#-
+Print out a newline to separate the settings for the current field (both
+forecast and observation settings) from those for the next field.
+#}
+            {{- '\n' }}
+
+          {%- endif %}
+        {%- endfor %}
+
+      {%- endif %}
+
+    {%- endfor %}
+  {%- endfor %}
+{%- endfor %}
+#
+# Forecast data time window(s).
+#
+{%- set comment_or_null = '' %}
+{%- set obs_window_abs_val = '0' %}
+{%- if input_field_group in ['REFC', 'RETOP'] %}
+  {%- set comment_or_null = '#' %}
+  {%- set obs_window_abs_val = '300' %}
+{%- endif %}
+{{comment_or_null}}FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = 0
+{{comment_or_null}}FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = 0
+#
+# Observation data time window(s).
+#
+{#-
+Use integers for seconds, but int can be changed to float if there is a
+need to go to sub-seconds.
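The window selection just above reduces to a simple conditional; a Python sketch of the same logic (field groups taken from the template):

    # Exact time match (0 s) for accumulated fields; the radar-based REFC and
    # RETOP observations are matched within +/-300 s.
    input_field_group = "REFC"  # illustrative
    obs_window_abs_val = 300 if input_field_group in ("REFC", "RETOP") else 0
    window_begin, window_end = -obs_window_abs_val, obs_window_abs_val
    # -> OBS_*_FILE_WINDOW_BEGIN = -300, OBS_*_FILE_WINDOW_END = 300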
+#} +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = {{ 0 - obs_window_abs_val|int }} +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = {{ obs_window_abs_val|int }} + +# MET {{MetplusToolName}} neighborhood values +# See the MET User's Guide {{MetplusToolName}} section for more information +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_FIELD = + +# width value passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = + +# shape value passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_SHAPE = + +# cov thresh list passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_COV_THRESH = >=0.5 + +# Set to true to run {{MetplusToolName}} separately for each field specified +# Set to false to create one run of {{MetplusToolName}} per run time that +# includes all fields specified. +{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False +# +# Set to true if forecast data is probabilistic. +# +FCST_IS_PROB = True +FCST_PROB_IN_GRIB_PDS = False +# +# Only used if FCST_IS_PROB is true - sets probabilistic threshold +# +FCST_{{METPLUS_TOOL_NAME}}_PROB_THRESH = {{thresh_fcst_prob}} + +{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} + +# Climatology data +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_DAY_INTERVAL = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_HOUR_INTERVAL = + +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_DAY_INTERVAL = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_HOUR_INTERVAL = + +{%- set comment_or_null = '' %} +{%- if input_field_group in ['APCP', 'ASNOW'] %} + {%- set comment_or_null = '#' %} +{%- endif %} + +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True + +{{METPLUS_TOOL_NAME}}_MASK_GRID = + +# Statistical output types +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = STAT +{%- if input_field_group in ['APCP', 'ASNOW'] %} 
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = BOTH
+{%- elif input_field_group in ['REFC', 'RETOP'] %}
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = NONE
+{%- endif %}
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTC = NONE
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTS = NONE
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCNT = STAT
+{%- if input_field_group in ['APCP', 'ASNOW'] %}
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_GRAD = BOTH
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_DMAP = NONE
+{%- elif input_field_group in ['REFC', 'RETOP'] %}
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_GRAD = NONE
+{%- endif %}
+
+# NetCDF matched pairs output file
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_VAR_NAME =
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_LATLON = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_RAW = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DIFF = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_WEIGHT = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_NBRHD = FALSE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_FOURIER = FALSE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_GRADIENT = FALSE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_APPLY_MASK = FALSE
+
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used in
+# this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Directory in which to write output from {{MetplusToolName}}.
+#
+# OUTPUT_BASE has to be set to a valid value; it cannot be left at its
+# default. It is not, however, explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
+#
+# Variable used to specify one or more verification mask files for
+# {{MetplusToolName}}. Not used for this example.
+# +{{METPLUS_TOOL_NAME}}_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensprob_APCP.conf b/parm/metplus/GridStat_ensprob_APCP.conf deleted file mode 100644 index 3e16de248d..0000000000 --- a/parm/metplus/GridStat_ensprob_APCP.conf +++ /dev/null @@ -1,362 +0,0 @@ -# Ensemble probabilistic GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. 
-# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; - -{%- set field_thresholds = [] %} -{%- if accum_hh == '01' %} - {%- set field_thresholds = ['gt0.0', 'ge0.254', 'ge0.508', 'ge2.54'] %} -{%- elif accum_hh == '03' %} - {%- set field_thresholds = ['gt0.0', 'ge0.508', 'ge2.54', 'ge6.350'] %} -{%- elif accum_hh == '06' %} - {%- set field_thresholds = ['gt0.0', 'ge2.54', 'ge6.350', 'ge12.700'] %} -{%- elif accum_hh == '24' %} - {%- set field_thresholds = ['gt0.0', 'ge6.350', 'ge12.700', 'ge25.400'] %} -{%- endif %} -# -# List of forecast and corresponding observation fields to process. -# -# FREQ -# Process as probability -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[0]}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds[0]}} - -FCST_VAR2_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[1]}} -FCST_VAR2_LEVELS = A{{accum_hh}} -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR2_LEVELS = A{{accum_hh}} -OBS_VAR2_THRESH = {{field_thresholds[1]}} - -FCST_VAR3_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[2]}} -FCST_VAR3_LEVELS = A{{accum_hh}} -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR3_LEVELS = A{{accum_hh}} -OBS_VAR3_THRESH = {{field_thresholds[2]}} - -FCST_VAR4_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[3]}} -FCST_VAR4_LEVELS = A{{accum_hh}} -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR4_LEVELS = A{{accum_hh}} -OBS_VAR4_THRESH = {{field_thresholds[3]}} - -# -#Process as scalars for neighborhood methods -## Note that the number of forecast and obs thresholds must match -## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;" -# -FCST_VAR5_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[0]}} -FCST_VAR5_LEVELS = A{{accum_hh}} -FCST_VAR5_THRESH = ==0.1 -FCST_VAR5_OPTIONS = prob = FALSE; -OBS_VAR5_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR5_LEVELS = A{{accum_hh}} -OBS_VAR5_THRESH = {{field_thresholds[0]}} -OBS_VAR5_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR6_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[1]}} -FCST_VAR6_LEVELS = A{{accum_hh}} -FCST_VAR6_THRESH = ==0.1 -FCST_VAR6_OPTIONS = prob = FALSE; -OBS_VAR6_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR6_LEVELS = A{{accum_hh}} -OBS_VAR6_THRESH = {{field_thresholds[1]}} -OBS_VAR6_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR7_NAME = 
{{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[2]}} -FCST_VAR7_LEVELS = A{{accum_hh}} -FCST_VAR7_THRESH = ==0.1 -FCST_VAR7_OPTIONS = prob = FALSE; -OBS_VAR7_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR7_LEVELS = A{{accum_hh}} -OBS_VAR7_THRESH = {{field_thresholds[2]}} -OBS_VAR7_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR8_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[3]}} -FCST_VAR8_LEVELS = A{{accum_hh}} -FCST_VAR8_THRESH = ==0.1 -FCST_VAR8_OPTIONS = prob = FALSE; -OBS_VAR8_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR8_LEVELS = A{{accum_hh}} -OBS_VAR8_THRESH = {{field_thresholds[3]}} -OBS_VAR8_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -# -# Forecast data time window(s). -# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. 
-# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = NONE -GRID_STAT_OUTPUT_FLAG_CTC = NONE -GRID_STAT_OUTPUT_FLAG_CTS = NONE -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = NONE -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -GRID_STAT_OUTPUT_FLAG_PCT = STAT -GRID_STAT_OUTPUT_FLAG_PSTD = STAT -GRID_STAT_OUTPUT_FLAG_PJC = STAT -GRID_STAT_OUTPUT_FLAG_PRC = STAT -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTS = NONE -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. 
-# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensprob_ASNOW.conf b/parm/metplus/GridStat_ensprob_ASNOW.conf deleted file mode 100644 index ecd17f681b..0000000000 --- a/parm/metplus/GridStat_ensprob_ASNOW.conf +++ /dev/null @@ -1,384 +0,0 @@ -# Ensemble probabilistic GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. 
Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; - -{%- set field_thresholds = [] %} -{%- if accum_hh == '06' %} - {%- set field_thresholds = ['gt0.0', 'ge2.54', 'ge5.08', 'ge10.16', 'ge20.32'] %} -{%- elif accum_hh == '24' %} - {%- set field_thresholds = ['gt0.0', 'ge2.54', 'ge5.08', 'ge10.16', 'ge20.32'] %} -{%- endif %} -# -# List of forecast and corresponding observation fields to process. -# -# FREQ -# Process as probability -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[0]}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds[0]}} -OBS_VAR1_OPTIONS = convert(x) = 100.0*x; - -FCST_VAR2_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[1]}} -FCST_VAR2_LEVELS = A{{accum_hh}} -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = {{fieldname_in_obs_input}} -OBS_VAR2_LEVELS = A{{accum_hh}} -OBS_VAR2_THRESH = {{field_thresholds[1]}} -OBS_VAR2_OPTIONS = convert(x) = 100.0*x; - -FCST_VAR3_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[2]}} -FCST_VAR3_LEVELS = A{{accum_hh}} -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = {{fieldname_in_obs_input}} -OBS_VAR3_LEVELS = A{{accum_hh}} -OBS_VAR3_THRESH = {{field_thresholds[2]}} -OBS_VAR3_OPTIONS = convert(x) = 100.0*x; - -FCST_VAR4_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[3]}} -FCST_VAR4_LEVELS = A{{accum_hh}} -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = {{fieldname_in_obs_input}} -OBS_VAR4_LEVELS = A{{accum_hh}} -OBS_VAR4_THRESH = {{field_thresholds[3]}} -OBS_VAR4_OPTIONS = convert(x) = 100.0*x; - -FCST_VAR5_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[4]}} -FCST_VAR5_LEVELS = A{{accum_hh}} -FCST_VAR5_THRESH = ==0.1 -OBS_VAR5_NAME = {{fieldname_in_obs_input}} -OBS_VAR5_LEVELS = A{{accum_hh}} -OBS_VAR5_THRESH = {{field_thresholds[4]}} -OBS_VAR5_OPTIONS = convert(x) = 100.0*x; - -# -#Process as scalars for neighborhood methods -## Note that the number of 
forecast and obs thresholds must match -## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;" -# -FCST_VAR6_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[0]}} -FCST_VAR6_LEVELS = A{{accum_hh}} -FCST_VAR6_THRESH = ==0.1 -FCST_VAR6_OPTIONS = prob = FALSE; -OBS_VAR6_NAME = {{fieldname_in_obs_input}} -OBS_VAR6_LEVELS = A{{accum_hh}} -OBS_VAR6_THRESH = {{field_thresholds[0]}} -OBS_VAR6_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; - convert(x) = 100.0*x; - -FCST_VAR7_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[1]}} -FCST_VAR7_LEVELS = A{{accum_hh}} -FCST_VAR7_THRESH = ==0.1 -FCST_VAR7_OPTIONS = prob = FALSE; -OBS_VAR7_NAME = {{fieldname_in_obs_input}} -OBS_VAR7_LEVELS = A{{accum_hh}} -OBS_VAR7_THRESH = {{field_thresholds[1]}} -OBS_VAR7_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; - convert(x) = 100.0*x; - -FCST_VAR8_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[2]}} -FCST_VAR8_LEVELS = A{{accum_hh}} -FCST_VAR8_THRESH = ==0.1 -FCST_VAR8_OPTIONS = prob = FALSE; -OBS_VAR8_NAME = {{fieldname_in_obs_input}} -OBS_VAR8_LEVELS = A{{accum_hh}} -OBS_VAR8_THRESH = {{field_thresholds[2]}} -OBS_VAR8_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; - convert(x) = 100.0*x; - -FCST_VAR9_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[3]}} -FCST_VAR9_LEVELS = A{{accum_hh}} -FCST_VAR9_THRESH = ==0.1 -FCST_VAR9_OPTIONS = prob = FALSE; -OBS_VAR9_NAME = {{fieldname_in_obs_input}} -OBS_VAR9_LEVELS = A{{accum_hh}} -OBS_VAR9_THRESH = {{field_thresholds[3]}} -OBS_VAR9_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; - convert(x) = 100.0*x; - -FCST_VAR10_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[4]}} -FCST_VAR10_LEVELS = A{{accum_hh}} -FCST_VAR10_THRESH = ==0.1 -FCST_VAR10_OPTIONS = prob = FALSE; -OBS_VAR10_NAME = {{fieldname_in_obs_input}} -OBS_VAR10_LEVELS = A{{accum_hh}} -OBS_VAR10_THRESH = {{field_thresholds[4]}} -OBS_VAR10_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; - convert(x) = 100.0*x; - -# -# Forecast data time window(s). -# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. 
-# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = NONE -GRID_STAT_OUTPUT_FLAG_CTC = NONE -GRID_STAT_OUTPUT_FLAG_CTS = NONE -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = NONE -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -GRID_STAT_OUTPUT_FLAG_PCT = STAT -GRID_STAT_OUTPUT_FLAG_PSTD = STAT -GRID_STAT_OUTPUT_FLAG_PJC = STAT -GRID_STAT_OUTPUT_FLAG_PRC = STAT -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTS = NONE -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. 
-# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensprob_REFC.conf b/parm/metplus/GridStat_ensprob_REFC.conf deleted file mode 100644 index 95e19af1ce..0000000000 --- a/parm/metplus/GridStat_ensprob_REFC.conf +++ /dev/null @@ -1,382 +0,0 @@ -# Ensemble probabilistic GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. 
Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -# FREQ -# Process as probability -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge20 -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = MergedReflectivityQCComposite -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = ge20 -OBS_VAR1_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - -FCST_VAR2_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge30 -FCST_VAR2_LEVELS = L0 -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = MergedReflectivityQCComposite -OBS_VAR2_LEVELS = Z500 -OBS_VAR2_THRESH = ge30 -OBS_VAR2_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - -FCST_VAR3_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge40 -FCST_VAR3_LEVELS = L0 -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = MergedReflectivityQCComposite -OBS_VAR3_LEVELS = Z500 -OBS_VAR3_THRESH = ge40 -OBS_VAR3_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - -FCST_VAR4_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge50 -FCST_VAR4_LEVELS = L0 -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = MergedReflectivityQCComposite -OBS_VAR4_LEVELS = Z500 -OBS_VAR4_THRESH = ge50 -OBS_VAR4_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - -# -#Process as scalars for neighborhood methods -## Note that the number of forecast and obs thresholds must match -## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;" -# -FCST_VAR5_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge20 -FCST_VAR5_LEVELS = L0 -FCST_VAR5_THRESH = ==0.1 -FCST_VAR5_OPTIONS = prob = FALSE; -OBS_VAR5_NAME = MergedReflectivityQCComposite -OBS_VAR5_LEVELS = Z500 -OBS_VAR5_THRESH = ge20 -OBS_VAR5_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR6_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge30 -FCST_VAR6_LEVELS = L0 -FCST_VAR6_THRESH = ==0.1 
-FCST_VAR6_OPTIONS = prob = FALSE; -OBS_VAR6_NAME = MergedReflectivityQCComposite -OBS_VAR6_LEVELS = Z500 -OBS_VAR6_THRESH = ge30 -OBS_VAR6_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR7_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge40 -FCST_VAR7_LEVELS = L0 -FCST_VAR7_THRESH = ==0.1 -FCST_VAR7_OPTIONS = prob = FALSE; -OBS_VAR7_NAME = MergedReflectivityQCComposite -OBS_VAR7_LEVELS = Z500 -OBS_VAR7_THRESH = ge40 -OBS_VAR7_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR8_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge50 -FCST_VAR8_LEVELS = L0 -FCST_VAR8_THRESH = ==0.1 -FCST_VAR8_OPTIONS = prob = FALSE; -OBS_VAR8_NAME = MergedReflectivityQCComposite -OBS_VAR8_LEVELS = Z500 -OBS_VAR8_THRESH = ge50 -OBS_VAR8_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. 
-# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = NONE -GRID_STAT_OUTPUT_FLAG_CTC = NONE -GRID_STAT_OUTPUT_FLAG_CTS = NONE -GRID_STAT_OUTPUT_FLAG_MCTC = NONE -GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = NONE -GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VCNT = NONE -GRID_STAT_OUTPUT_FLAG_PCT = STAT -GRID_STAT_OUTPUT_FLAG_PSTD = STAT -GRID_STAT_OUTPUT_FLAG_PJC = STAT -GRID_STAT_OUTPUT_FLAG_PRC = STAT -GRID_STAT_OUTPUT_FLAG_ECLV = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTC = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTS = NONE -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -GRID_STAT_OUTPUT_FLAG_GRAD = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. 
-# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensprob_RETOP.conf b/parm/metplus/GridStat_ensprob_RETOP.conf deleted file mode 100644 index d1f218bea8..0000000000 --- a/parm/metplus/GridStat_ensprob_RETOP.conf +++ /dev/null @@ -1,390 +0,0 @@ -# Ensemble probabilistic GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. 
Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -# FREQ -# Process as probability -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge20 -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = EchoTop18 -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = ge20 -OBS_VAR1_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - -FCST_VAR2_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge30 -FCST_VAR2_LEVELS = L0 -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = EchoTop18 -OBS_VAR2_LEVELS = Z500 -OBS_VAR2_THRESH = ge30 -OBS_VAR2_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - -FCST_VAR3_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge40 -FCST_VAR3_LEVELS = L0 -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = EchoTop18 -OBS_VAR3_LEVELS = Z500 -OBS_VAR3_THRESH = ge40 -OBS_VAR3_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - -FCST_VAR4_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge50 -FCST_VAR4_LEVELS = L0 -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = EchoTop18 -OBS_VAR4_LEVELS = Z500 -OBS_VAR4_THRESH = ge50 -OBS_VAR4_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - -# -#Process as scalars for neighborhood methods -## Note that the number of forecast and obs thresholds must match -## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;" -# -FCST_VAR5_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge20 -FCST_VAR5_LEVELS = L0 -FCST_VAR5_THRESH = ==0.1 -FCST_VAR5_OPTIONS = prob = FALSE; -OBS_VAR5_NAME = EchoTop18 -OBS_VAR5_LEVELS = Z500 -OBS_VAR5_THRESH = ge20 -OBS_VAR5_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR6_NAME = 
{{fieldname_in_met_output}}_L0_ENS_FREQ_ge30 -FCST_VAR6_LEVELS = L0 -FCST_VAR6_THRESH = ==0.1 -FCST_VAR6_OPTIONS = prob = FALSE; -OBS_VAR6_NAME = EchoTop18 -OBS_VAR6_LEVELS = Z500 -OBS_VAR6_THRESH = ge30 -OBS_VAR6_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR7_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge40 -FCST_VAR7_LEVELS = L0 -FCST_VAR7_THRESH = ==0.1 -FCST_VAR7_OPTIONS = prob = FALSE; -OBS_VAR7_NAME = EchoTop18 -OBS_VAR7_LEVELS = Z500 -OBS_VAR7_THRESH = ge40 -OBS_VAR7_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR8_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge50 -FCST_VAR8_LEVELS = L0 -FCST_VAR8_THRESH = ==0.1 -FCST_VAR8_OPTIONS = prob = FALSE; -OBS_VAR8_NAME = EchoTop18 -OBS_VAR8_LEVELS = Z500 -OBS_VAR8_THRESH = ge50 -OBS_VAR8_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. 
-# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = NONE -GRID_STAT_OUTPUT_FLAG_CTC = NONE -GRID_STAT_OUTPUT_FLAG_CTS = NONE -GRID_STAT_OUTPUT_FLAG_MCTC = NONE -GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = NONE -GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VCNT = NONE -GRID_STAT_OUTPUT_FLAG_PCT = STAT -GRID_STAT_OUTPUT_FLAG_PSTD = STAT -GRID_STAT_OUTPUT_FLAG_PJC = STAT -GRID_STAT_OUTPUT_FLAG_PRC = STAT -GRID_STAT_OUTPUT_FLAG_ECLV = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTC = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTS = NONE -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -GRID_STAT_OUTPUT_FLAG_GRAD = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. 
-# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_or_PointStat.conf b/parm/metplus/GridStat_or_PointStat.conf new file mode 100644 index 0000000000..c90783862b --- /dev/null +++ b/parm/metplus/GridStat_or_PointStat.conf @@ -0,0 +1,940 @@ +# {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{fhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. +# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. 
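A few lines above, METPLUS_CONF is built with Jinja string concatenation ('{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}') because the name of the output-directory variable itself depends on the tool name; the rendered result is a literal METplus variable reference that METplus, not Jinja, resolves later. A minimal Python sketch of how that one line renders, assuming the jinja2 package; the tool name and config file name passed to render() are hypothetical:

from jinja2 import Template

# Render the METPLUS_CONF line from the template above. The '~' operator
# concatenates '{', the tool name, and '_OUTPUT_DIR}' into a literal
# METplus variable reference.
line = ("METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}"
        "/metplus_final.{{metplus_config_fn}}")
print(Template(line).render(METPLUS_TOOL_NAME="GRID_STAT",
                            metplus_config_fn="GridStat_REFC.conf"))
# -> METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.GridStat_REFC.conf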
+#
+{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped
+
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+#
+# Grid to remap data. Value is set as the 'to_grid' variable in the
+# 'regrid' dictionary. See MET User's Guide for more information.
+#
+{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = FCST
+{{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5
+{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BUDGET
+{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2
+{{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE
+{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %}
+#
+# Regrid to specified grid. Indicate NONE if no regridding, or the grid id
+# (e.g. G212).
+#
+{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE
+{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BILIN
+{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2
+{%- endif %}
+
+{%- if (METPLUS_TOOL_NAME == 'POINT_STAT') %}
+#
+{{METPLUS_TOOL_NAME}}_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA
+#{{METPLUS_TOOL_NAME}}_OBS_QUALITY_EXC =
+{%- endif %}
+
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+
+  {%- if (input_field_group == 'APCP') %}
+
+#{{METPLUS_TOOL_NAME}}_INTERP_FIELD = BOTH
+#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0
+#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE
+#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST
+#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1
+
+#{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG =
+  {%- elif input_field_group in ['REFC', 'RETOP'] %}
+
+{{METPLUS_TOOL_NAME}}_INTERP_FIELD = NONE
+{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0
+{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1
+
+{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG = NONE
+  {%- endif %}
+
+{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %}
+
+#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH =
+#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE =
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = BILIN
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 2
+
+{%- endif %}
+#
+# Name to identify model (forecast) data in output.
+#
+# The variable MODEL is recorded in the stat files, and the data in
+# these files is then plotted (e.g. using METViewer). Here, we add a
+# suffix to MODEL that identifies the forecast ensemble member. This
+# makes it easier to identify each curve.
+#
+MODEL = {{vx_fcst_model_name}}_{{ensmem_name}}
+#
+# Name to identify observation data in output.
+#
+OBTYPE = {{obtype}}
+#
+# Value to enter under the DESC column in the output stat file. Here,
+# we store the value of the original lead in this column, i.e. the lead
+# with zero corresponding to the actual start time of the forecast (which
+# is (cdate - time_lag)), not to cdate. This is just the lead in
+# LEAD_SEQ with the time lag (time_lag) of the current forecast member
+# added on.
+#
+# Uncomment this line only after upgrading to METplus 5.x.
+#{{METPLUS_TOOL_NAME}}_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}}
+{{METPLUS_TOOL_NAME}}_DESC = NA
+#
+# Verification masking regions.
+# Indicate which grid and polygon masking region to use, if applicable.
+#
+{{METPLUS_TOOL_NAME}}_MASK_GRID =
+
+{%- if (METPLUS_TOOL_NAME == 'POINT_STAT') %}
+#
+# List of full paths to poly masking files. NOTE: Only short lists of poly
+# files work (those that fit on one line); a long list results in an
+# environment variable that is too long, causing an error. For long
+# lists of poly masking files (i.e. all the mask files in the NCEP_mask
+# directory), define these in the METplus {{MetplusToolName}} configuration file.
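Throughout the blocks above, a single template file yields either a GridStat or a PointStat configuration depending on METPLUS_TOOL_NAME. A short Python sketch of how that branching renders, assuming the jinja2 package and using a trimmed copy of the REGRID block as input:

from jinja2 import Template

# Trimmed copy of the REGRID block, rendered once per tool name to show
# how one template produces tool-specific settings.
block = """\
{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = FCST
{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BUDGET
{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %}
{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE
{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BILIN
{%- endif %}"""

for tool in ("GRID_STAT", "POINT_STAT"):
    print(Template(block).render(METPLUS_TOOL_NAME=tool).strip())
# GRID_STAT_REGRID_TO_GRID = FCST
# GRID_STAT_REGRID_METHOD = BUDGET
# POINT_STAT_REGRID_TO_GRID = NONE
# POINT_STAT_REGRID_METHOD = BILIN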
+# +{{METPLUS_TOOL_NAME}}_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly +{{METPLUS_TOOL_NAME}}_STATION_ID = + +# Message types, if all message types are to be returned, leave this empty, +# otherwise indicate the message types of interest. +{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} +{%- endif %} +{%- set overrides_indent_len = 0 %} +{%- set overrides_indent = '' %} +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} + {%- set overrides_indent_len = 33 %} + {%- set overrides_indent = ' '*overrides_indent_len %} +# +# Overrides of MET configuration defaults. +# +{{METPLUS_TOOL_NAME}}_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; +{{overrides_indent}}cnt_thresh = [NA]; +{{overrides_indent}}cnt_logic = UNION; +{{overrides_indent}}wind_thresh = [NA]; +{{overrides_indent}}wind_logic = UNION; +{{overrides_indent}}ci_alpha = [0.05]; +{{overrides_indent}}rank_corr_flag = FALSE; +{%- endif %} +# +# List of forecast and corresponding observation fields to process. +# +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} + {%- if input_field_group in ['APCP', 'ASNOW'] %} +# Note that for accumulated fields such as APCP and ASNOW, in the input +# forecast and observation files (which are generated by MET's PcpCombine +# tool) the accumulation period is appended to the field name, so the +# same is done here. +# + {%- endif %} +{%- endif %} +# Note on use of set_attr_lead and ensemble member time-lagging: +# ------------------------------------------------------------- +# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS +# specifies the lead to use both in naming of the output .stat and .nc +# files and for setting the lead values contained in those files. This +# option causes MET/METplus to use the lead values in the variable LEAD_SEQ +# set above, which are the same for all ensemble forecast members (i.e. +# regardless of whether members are time lagged with respect to the +# nominal cycle date specified by cdate). If set_attr_lead were not +# specified as below, then MET/METplus would get the lead from the input +# forecast file, and that would in general differ from one ensemble member +# to the next depending on whether the member is time-lagged. That would +# cause confusion, so here, we always use lead values with zero lead +# corresponding to the nominal cdate. +# +{#- +Import the file containing jinja macros. +#} +{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %} + +{#- +Jinja requires certain variables to be defined globally within the template +before they can be used in if-statements and other scopes (see Jinja +scoping rules). Define such variables. +#} +{%- set levels_fcst = '' %} +{%- set levels_obs = '' %} +{%- set indx_input_level_fcst = '' %} + +{%- set valid_threshes_fcst = [] %} +{%- set valid_threshes_obs = [] %} +{%- set threshes_fcst = [] %} +{%- set threshes_obs = [] %} +{%- set indx_input_thresh_fcst = '' %} + +{%- set opts_indent = '' %} +{%- set opts_indent_len = '' %} +{%- set tmp = '' %} +{%- set error_msg = '' %} +{#- +Make sure that the set of field groups for forecasts and observations +are identical. 
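+
+(Illustrative note: under the hypothetical assumption that both
+dictionaries define the field groups APCP, ASNOW, REFC, RETOP, ADPSFC,
+and ADPUPA, fgs_fcst and fgs_obs would each be that six-element list and
+the check below would pass.)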
+#}
+{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
+{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
+{%- if (fgs_fcst != fgs_obs) %}
+  {%- set error_msg = '\n' ~
+'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
+'to that for observations (fgs_obs) but isn\'t:\n' ~
+'  fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
+'  fgs_obs = ' ~ fgs_obs %}
+  {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- endif %}
+
+{#-
+Extract the lists of forecast and observation dictionaries containing
+the valid fields, levels, and thresholds corresponding to the specified
+field group (input_field_group). Note that it would be simpler to have
+these be just dictionaries in which the keys are the field names (instead
+of them being LISTS of dictionaries in which each dictionary contains a
+single key that is the field name), but that approach cannot be used here
+because it is possible for field names to be repeated (for both forecasts
+and observations). For example, in the observations, the field name
+'PRWE' appears more than once, each time with a different threshold, and
+the combination of name and threshold is what constitutes a unique field,
+not just the name by itself.
+#}
+{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
+{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+
+{#-
+Reset the specified forecast level so that if it happens to be an
+accumulation (e.g. 'A03'), the leading zeros in front of the hour are
+stripped out (e.g. reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Ensure that the specified input forecast level(s) (input_level_fcst) and
+threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
+set(s) of valid forecast levels and thresholds, respectively, specified
+in fields_levels_threshes_fcst.
+#}
+{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
+{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+
+{#-
+For convenience, create lists of valid forecast and observation field
+names.
+#}
+{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
+{%- set valid_fields_fcst = [] %}
+{%- for i in range(0,num_valid_fields_fcst) %}
+  {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
+  {%- set tmp = valid_fields_fcst.append(field) %}
+{%- endfor %}
+
+{%- set valid_fields_obs = [] %}
+{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
+{%- for i in range(0,num_valid_fields_obs) %}
+  {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
+  {%- set tmp = valid_fields_obs.append(field) %}
+{%- endfor %}
+
+{#-
+Ensure that the number of valid fields for forecasts is equal to that
+for the observations.
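+
+(Illustrative note: for ADPSFC observations, the list might contain the
+field name 'PRWE' several times -- e.g. once with threshold ge161&&le163
+for rain and once with ge171&&le173 for snow -- which is why the counts
+of list elements, not of unique field names, must match.)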
+#}
+{%- set num_valid_fields = 0 %}
+{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
+  {%- set error_msg = '\n' ~
+'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
+'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
+'but isn\'t:\n' ~
+'  num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
+'  num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
+'The lists of valid forecast and observation fields are:\n' ~
+'  valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
+'  valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
+  {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- else %}
+  {%- set num_valid_fields = num_valid_fields_fcst %}
+{%- endif %}
+
+{#-
+Loop over the valid fields and set field names, levels, thresholds, and/
+or options for each field, both for forecasts and for observations, in
+the METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+{%- for i in range(0,num_valid_fields) %}
+
+  {%- set field_fcst = valid_fields_fcst[i] %}
+  {%- set field_obs = valid_fields_obs[i] %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field. Then check that the number of valid levels for
+forecasts is the same as that for observations.
+#}
+  {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
+  {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
+
+  {%- set num_valid_levels = 0 %}
+  {%- set num_valid_levels_fcst = valid_levels_fcst|length %}
+  {%- set num_valid_levels_obs = valid_levels_obs|length %}
+  {%- if (num_valid_levels_fcst != num_valid_levels_obs) %}
+    {%- set error_msg = '\n' ~
+'The number of valid forecast levels (num_valid_levels_fcst) must be\n' ~
+'equal to the number of valid observation levels (num_valid_levels_obs)\n' ~
+'but isn\'t:\n' ~
+'  num_valid_levels_fcst = ' ~ num_valid_levels_fcst ~ '\n' ~
+'  num_valid_levels_obs = ' ~ num_valid_levels_obs ~ '\n' %}
+    {{metplus_macros.print_err_and_quit(error_msg)}}
+  {%- else %}
+    {%- set num_valid_levels = num_valid_levels_fcst %}
+  {%- endif %}
+
+{#-
+Make sure that input_level_fcst is set to a valid value.
+#}
+  {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %}
+    {%- set error_msg = '\n' ~
+'The input forecast level (input_level_fcst) must either be set to \'all\',\n' ~
+'or it must be set to one of the elements in the list of valid levels\n' ~
+'(valid_levels_fcst) for the current forecast field (field_fcst). This\n' ~
+'is not the case:\n' ~
+'  field_fcst = ' ~ field_fcst ~ '\n' ~
+'  input_level_fcst = ' ~ input_level_fcst ~ '\n' ~
+'  valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' %}
+    {{metplus_macros.print_err_and_quit(error_msg)}}
+  {%- endif %}
+
+{#-
+Increment the METplus variable counter.
+#}
+  {%- set ns.var_count = ns.var_count+1 %}
+
+{#-
+Set forecast field name. Note that this has to exactly match the name
+of the field in the input forecast file.
+
+For accumulated fields, the input forecast file is generated by MET's
+PcpCombine tool. In that file, the field name consists of the forecast
+field name here (field_fcst) with the accumulation period appended to
+it (separated by an underscore), so we must do the same here to get an
+exact match.
+#}
+  {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}}
+  {%- else %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}
+  {%- endif %}
+
+{#-
+Set forecast field level(s).
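+
+(Illustrative note: assuming a hypothetical valid_levels_fcst of
+['P500', 'P700', 'P850'], setting input_level_fcst to 'all' selects the
+full list, while setting it to 'P700' selects the one-element list
+['P700'] via the index lookup described below.)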
+#} + {%- if (input_level_fcst == 'all') %} + {%- set levels_fcst = valid_levels_fcst %} +{#- +If input_level_fcst is set to a specific value: + 1) Ensure that input_level_fcst exists in the list of valid forecast + levels. + 2) Get the index of input_level_fcst in the list of valid forecast + levels. This will be needed later below when setting the observation + level(s). + 3) Use this index to set the forecast level to a one-element list + containing the specified forecast level. +#} + {%- else %} + {%- if input_level_fcst not in valid_levels_fcst %} + {%- set error_msg = '\n' ~ +'For the current forecast field (field_fcst), the input forecast level\n' ~ +'(input_level_fcst) does not exist in the list of valid forecast levels\n' ~ +'(valid_levels_fcst):\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' ~ +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + {%- set indx_input_level_fcst = valid_levels_fcst.index(input_level_fcst) %} + {%- set levels_fcst = [valid_levels_fcst[indx_input_level_fcst]] %} + {%- endif %} +FCST_VAR{{ns.var_count}}_LEVELS = {{levels_fcst|join(', ')}} + +{#- +Set forecast field threshold(s). Note that no forecast thresholds are +included in the METplus configuration file if input_thresh_fcst is set +to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_level_fcst is set to 'all' and there is more than one (forecast +or observation) level to be verified for the current (forecast or +observation) field, then the list of forecast thresholds for each forecast +level must be identical to every other. Check for this. Note that this +restriction includes the order of the thresholds, i.e. the set of +thresholds for each level must be in the same order as for all other +levels. +#} + {%- if (input_level_fcst == 'all') and (num_valid_levels > 1) %} + {{- metplus_macros.check_for_identical_threshes_by_level( + field_fcst, fields_levels_threshes_fcst[i]) }} + {%- endif %} +{#- +Now set the list of valid forecast thresholds to the one corresponding +to the first (zeroth) forecast level in the list of forecast levels set +above. We can do this because, for the case of a single forecast level, +there is only one list of forecast thresholds to consider (the first +one), and for the case of all levels, all levels have the same set of +thresholds (as verified by the check above). +#} + {%- set valid_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst][levels_fcst[0]] %} +{#- +If input_thresh_fcst is set to 'all', set the list of forecast thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_fcst = valid_threshes_fcst %} +{#- +If input_thresh_fcst is set to a specific value: + 1) Ensure that input_thresh_fcst exists in the list of valid forecast + thresholds. + 2) Get the index of input_thresh_fcst in the list of valid forecast + thresholds. This will be needed later below when setting the + observation threshold(s). + 3) Use this index to set the forecast threshold to a one-element list + containing the specified forecast threshold. 
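+
+(Illustrative note: with a hypothetical valid_threshes_fcst of
+['gt0.0', 'ge0.254', 'ge0.508'] and input_thresh_fcst = 'ge0.254',
+indx_input_thresh_fcst would be 1 and threshes_fcst would become
+['ge0.254'].)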
+#} + {%- else %} + + {%- if input_thresh_fcst not in valid_threshes_fcst %} + {%- set error_msg = '\n' ~ +'For the current forecast field (field_fcst) and list of forecast level(s)\n' ~ +'(levels_fcst), the input forecast threshold (input_thresh_fcst) does not\n' ~ +'exist in the list of valid forecast thresholds (valid_threshes_fcst):\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' levels_fcst = ' ~ levels_fcst ~ '\n' ~ +' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~ +' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} + {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} + + {%- endif %} +{#- +If threshes_fcst has been reset to something other than its default +value of an empty list, then set the forecast thresholds in the METplus +configuration file because that implies threshes_fcst was set above to +a non-empty value. Then reset threshes_fcst to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_fcst != []) %} +FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}} + {%- endif %} + {%- set threshes_fcst = [] %} + + {%- endif %} + +{#- +Set forecast field options. +#} +FCST_VAR{{ns.var_count}}_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; + + {%- set opts_indent_len = 20 %} + {%- if (ns.var_count > 9) and (ns.var_count <= 99) %} + {%- set opts_indent_len = opts_indent_len + 1 %} + {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %} + {%- set opts_indent_len = opts_indent_len + 2 %} + {%- elif (ns.var_count > 999) %} + {%- set opts_indent_len = opts_indent_len + 3 %} + {%- endif %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if (input_field_group == 'REFC') %} + + {%- if (field_fcst == 'REFC') %} +{{opts_indent}}cnt_thresh = [ >15 ]; +{{opts_indent}}cnt_logic = UNION; + {%- endif %} + + {%- elif (input_field_group == 'RETOP') %} + + {%- if (field_fcst == 'RETOP') %} +{{opts_indent}}convert(x) = x * 3.28084 * 0.001; +{{opts_indent}}cnt_thresh = [ >0 ]; +{{opts_indent}}cnt_logic = UNION; + {%- endif %} + + {%- elif (input_field_group == 'ADPSFC') %} + + {%- if (field_fcst in ['WIND']) %} +{{opts_indent}}GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. + {%- elif (field_fcst in ['TCDC']) %} +{{opts_indent}}GRIB_lvl_typ = 200; +{{opts_indent}}GRIB2_ipdtmpl_index=[27]; +{{opts_indent}}GRIB2_ipdtmpl_val=[255]; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif (field_fcst in ['VIS']) %} +{{opts_indent}}censor_thresh = [>16090]; +{{opts_indent}}censor_val = [16090]; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif (field_fcst in ['HGT']) %} +{{opts_indent}}GRIB_lvl_typ = 215; +{{opts_indent}}desc = "CEILING"; + {%- endif %} + + {%- elif (input_field_group == 'ADPUPA') %} + + {%- if (field_fcst in ['HGT']) %} + {%- if (levels_fcst[0] in ['L0']) %} +{{opts_indent}}GRIB_lvl_typ = 220; + {%- endif %} + {%- elif (field_fcst in ['CAPE']) %} +{{opts_indent}}cnt_thresh = [ >0 ]; + {%- endif %} + + {%- endif %} + +{#- +Set observation field name. Note that this has to exactly match the name +of the field in the input observation file. + +For accumulated fields, the input observation file is generated by MET's +PcpCombine tool. 
In that file, the field name consists of the observation
+field name here (field_obs) with the accumulation period appended to it
+(separated by an underscore), so we must do the same here to get an exact
+match.
+
+Note:
+It turns out that for ASNOW, PcpCombine is not run for obs, so we exclude
+that from the "if" clause here (so it goes into the "else"). For workflow
+behavior uniformity between APCP and ASNOW, consider running PcpCombine
+for ASNOW observations as well (just as it's run for APCP observations).
+  {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+#}
+  {%- if (input_field_group in ['APCP']) %}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}_{{accum_hh}}
+  {%- else %}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}
+  {%- endif %}
+
+{#-
+Set observation field level(s).
+#}
+  {%- if (input_level_fcst == 'all') %}
+    {%- set levels_obs = valid_levels_obs %}
+{#-
+If input_level_fcst is set to a specific forecast level, then the
+observation level is given by the element in the list of valid observation
+levels that has the same index as that of input_level_fcst in the list
+of valid forecast levels.
+#}
+  {%- else %}
+    {%- set levels_obs = [valid_levels_obs[indx_input_level_fcst]] %}
+  {%- endif %}
+OBS_VAR{{ns.var_count}}_LEVELS = {{levels_obs|join(', ')}}
+
+{#-
+Set observation field threshold(s). Note that no observation thresholds
+are included in the METplus configuration file if input_thresh_fcst is
+set to 'none'.
+#}
+  {%- if (input_thresh_fcst != 'none') %}
+{#-
+If input_level_fcst is set to 'all' and there is more than one (forecast
+or observation) level to be verified for the current (forecast or
+observation) field, then the list of observation thresholds for each
+observation level must be identical to every other. Check for this.
+Note that this restriction includes the order of the thresholds, i.e.
+the set of thresholds for each level must be in the same order as for
+all other levels.
+#}
+    {%- if (input_level_fcst == 'all') and (num_valid_levels > 1) %}
+      {{- metplus_macros.check_for_identical_threshes_by_level(
+           field_obs, fields_levels_threshes_obs[i]) }}
+    {%- endif %}
+{#-
+Now set the list of valid observation thresholds to the one corresponding
+to the first (zeroth) observation level in the list of observation levels
+set above. We can do this because, for the case of a single observation
+level, there is only one list of observation thresholds to consider (the
+first one), and for the case of all levels, all levels have the same set
+of thresholds (as verified by the check above).
+#}
+    {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][levels_obs[0]] %}
+{#-
+If input_thresh_fcst is set to 'all', set the list of observation thresholds
+to the full set of valid values.
+#}
+    {%- if (input_thresh_fcst == 'all') %}
+
+      {%- set threshes_obs = valid_threshes_obs %}
+{#-
+If input_thresh_fcst is set to a specific forecast threshold, then the
+observation threshold is given by the element in the list of valid
+observation thresholds that has the same index as that of input_thresh_fcst
+in the list of valid forecast thresholds.
+#}
+    {%- else %}
+
+      {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %}
+
+    {%- endif %}
+{#-
+If threshes_obs has been reset to something other than its default value
+of an empty list, then set the observation thresholds in the METplus
+configuration file because that implies threshes_obs was set above to
+a non-empty value.
Then reset threshes_obs to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_obs != []) %} +OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}} + {%- endif %} + {%- set threshes_obs = [] %} + + {%- endif %} + +{#- +Set observation field options. +#} + {%- set opts_indent_len = opts_indent_len - 1 %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if (input_field_group == 'ASNOW') %} + + {%- if (field_obs == 'ASNOW') %} +OBS_VAR{{ns.var_count}}_OPTIONS = convert(x) = 100.0*x; + {%- endif %} + + {%- elif (input_field_group == 'REFC') %} + + {%- if (field_obs == 'MergedReflectivityQCComposite') %} +OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = [eq-999, <-20]; +{{opts_indent}}censor_val = [-9999, -20]; +{{opts_indent}}cnt_thresh = [ >15 ]; +{{opts_indent}}cnt_logic = UNION; + {%- endif %} + + {%- elif (input_field_group == 'RETOP') %} + + {%- if (field_obs in ['EchoTop18']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = convert(x) = x * 3280.84 * 0.001; +{{opts_indent}}censor_thresh = [<=-9.84252,eq-3.28084]; +{{opts_indent}}censor_val = [-9999,-16.4042]; +{{opts_indent}}cnt_thresh = [ >0 ]; +{{opts_indent}}cnt_logic = UNION; + {%- endif %} + + {%- elif (input_field_group == 'ADPSFC') %} + + {%- if (field_obs in ['WIND']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. + {%- elif (field_obs in ['VIS']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = [>16090]; +{{opts_indent}}censor_val = [16090]; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif (field_obs in ['CEILING']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- endif %} + + {%- elif (input_field_group == 'ADPUPA') %} + + {%- if (field_obs in ['CAPE', 'MLCAPE']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; +{{opts_indent}}cnt_logic = UNION; + {%- elif (field_obs in ['PBL']) %} + {%- if (field_fcst in ['HPBL']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE"; + {%- elif (field_fcst in ['HGT']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = desc = "RI"; + {%- endif %} + {%- endif %} + + {%- endif %} + +{#- +Print out a newline to separate the settings for the current field (both +forecast and observation settings) from those for the next field. +#} + {{- '\n' }} + +{%- endfor %} + +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +# +# Forecast data time window(s). +# +FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = 0 +FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = 0 + {%- endif %} +{%- endif %} +# +# Observation data time window(s). 
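+# (Illustrative note: the window values set below are in seconds relative
+# to the forecast valid time; e.g. the PointStat window of -1799/1800
+# accepts point observations within roughly half an hour of the valid
+# time.)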
+# +{%- set obs_window_begin = 0 %} +{%- set obs_window_end = 0 %} +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} + {%- if (input_field_group in ['REFC', 'RETOP']) %} + {%- set obs_window_begin = -300 %} + {%- set obs_window_end = 300 %} + {%- endif %} +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = {{obs_window_begin}} +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = {{obs_window_end}} +{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %} + {%- set obs_window_begin = -1799 %} + {%- set obs_window_end = 1800 %} +OBS_WINDOW_BEGIN = {{obs_window_begin}} +OBS_WINDOW_END = {{obs_window_end}} +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = {OBS_WINDOW_END} +# +# Optional list of offsets to look for point observation data +# +{{METPLUS_TOOL_NAME}}_OFFSETS = 0 +{%- endif %} + +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} + +# MET {{MetplusToolName}} neighborhood values +# See the MET User's Guide {{MetplusToolName}} section for more information +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_FIELD = BOTH + +# width value passed to nbrhd dictionary in the MET config file +{%- if (input_field_group in ['APCP']) %} +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = 3,5,7 +{%- elif (input_field_group in ['ASNOW']) %} +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = 5 +{%- elif (input_field_group in ['REFC', 'RETOP']) %} +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = 1,3,5,7 +{%- endif %} + +# shape value passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_SHAPE = SQUARE + +# cov thresh list passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_COV_THRESH = >=0.5 +{%- endif %} +# +# Set to True to run {{MetplusToolName}} separately for each field specified; +# set to False to run {{MetplusToolName}} once per run time that includes all +# fields specified. 
+# +{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False + +{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} + +# Climatology data +{%- set comment_or_null = '' %} +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = + {%- if (input_field_group in ['APCP', 'ASNOW']) %} + {%- set comment_or_null = '#' %} + {%- endif %} + +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True +{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %} +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST + +{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True +{%- endif %} + +# Statistical output types +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = BOTH +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTS = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_GRAD = BOTH +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_DMAP = NONE +{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %} +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECNT = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RPS = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MPR = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ORANK = +{%- endif %} + +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} + +# NetCDF matched pairs output file +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_VAR_NAME = +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_LATLON = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_RAW = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DIFF = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO = FALSE +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO_CDP = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_WEIGHT = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_NBRHD = FALSE +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_FOURIER = FALSE +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_GRADIENT = FALSE +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE 
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_APPLY_MASK = FALSE
+{%- endif %}

+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used in
+# this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Directory in which to write output from {{MetplusToolName}}.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}

+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
+{#-
+Not sure if the following section for ..._VERIFICATION_MASK_TEMPLATE
+is also necessary for PointStat.
+#}
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+#
+# Variable used to specify one or more verification mask files for
+# {{MetplusToolName}}. Not used for this example.
+#
+{{METPLUS_TOOL_NAME}}_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+{%- endif %}
diff --git a/parm/metplus/PcpCombine.conf b/parm/metplus/PcpCombine.conf
new file mode 100644
index 0000000000..3cee69df1d
--- /dev/null
+++ b/parm/metplus/PcpCombine.conf
@@ -0,0 +1,216 @@
+{%- if FCST_OR_OBS == 'FCST' -%}
+# PcpCombine METplus Configuration for Forecasts
+{%- elif FCST_OR_OBS == 'OBS' -%}
+# PcpCombine METplus Configuration for Observations
+{%- endif %}

+[config]

+# List of applications (tools) to run.
+PROCESS_LIST = PcpCombine

+# time looping - options are INIT, VALID, RETRO, and REALTIME
+# If set to INIT or RETRO:
+#   INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
+# If set to VALID or REALTIME:
+#   VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
+LOOP_BY = INIT

+# Format of INIT_BEG and INIT_END using % items
+# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
+# see www.strftime.org for more information
+# %Y%m%d%H expands to YYYYMMDDHH
+INIT_TIME_FMT = %Y%m%d%H

+# Start time for METplus run - must match INIT_TIME_FMT
+INIT_BEG = {{cdate}}

+# End time for METplus run - must match INIT_TIME_FMT
+INIT_END = {{cdate}}

+# Increment between METplus runs (in seconds if no units are specified).
+# Must be >= 60 seconds.
+INIT_INCREMENT = 3600

+# List of forecast leads to process for each run time (init or valid)
+# In hours if units are not specified
+# If unset, defaults to 0 (don't loop through forecast leads)
+LEAD_SEQ = {{fhr_list}}

+# Order of loops to process data - Options are times, processes
+# Not relevant if only one item is in the PROCESS_LIST
+# times = run all wrappers in the PROCESS_LIST for a single run time, then
+#   increment the run time and run all wrappers again until all times have
+#   been evaluated.
+# processes = run the first wrapper in the PROCESS_LIST for all times
+#   specified, then repeat for the next item in the PROCESS_LIST until all
+#   wrappers have been run
+LOOP_ORDER = times
+#
+# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
+#
+LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
+#
+# Specify the name of the METplus log file.
+#
+LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
+#
+# Specify the location and name of the final METplus conf file.
+#
+METPLUS_CONF = {% raw %}{{% endraw %}{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}

+{%- if FCST_OR_OBS == 'FCST' %}
+#
+# Run PcpCombine on forecast data but not observations (observation input
+# files are not provided).
+#
+OBS_PCP_COMBINE_RUN = False
+FCST_PCP_COMBINE_RUN = True
+{%- elif FCST_OR_OBS == 'OBS' %}
+#
+# Run PcpCombine on observation data but not forecasts (forecast input
+# files are not provided).
+#
+OBS_PCP_COMBINE_RUN = True
+FCST_PCP_COMBINE_RUN = False
+{%- endif %}
+#
+# Mode of PcpCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM).
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_METHOD = ADD

+{%- if (FCST_OR_OBS == 'FCST') and (input_field_group == 'ASNOW') %}
+#
+# Specify name of variable for Snowfall Accumulation.
+# NOTE: Currently TSNOWP is used, which is a constant-density estimate of snowfall accumulation.
+# In future RRFS development, a GSL product with variable-density snowfall accumulation
+# is planned for UPP. When that is included and turned on in post, this variable may be changed
+# to ASNOW.
+#
+FCST_PCP_COMBINE_INPUT_NAMES = TSNOWP

+FCST_PCP_COMBINE_INPUT_LEVELS = A01
+{%- endif %}
+#
+# Specify how to name the array in the NetCDF file that PcpCombine
+# generates.
+#
+# For accumulation variables (the only type of variable that we run
+# PcpCombine on), we add the accumulation period to the variable name
+# because this is how METplus normally sets names. This is because,
+# depending on the settings in the METplus configuration file, it is
+# possible for a single NetCDF output file to contain output for multiple
+# accumulations, so even though the "level" attribute of each accumulation
+# variable in the output file will contain the level (e.g. "A1" or "A3"),
+# the variable names for, say, the 1-hour and 3-hour accumulations would be
+# the same (e.g. both would be "APCP"), which is not allowed and/or would
+# cause overwriting of data. To avoid this, METplus includes the level
+# as part of the variable name, so we do the same here (even though in
+# our case, it is not required because there will only be one variable in
+# the output NetCDF file).
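+#
+# (Illustrative note: e.g. for 3-hourly APCP observations, assuming
+# fieldname_in_met_output = 'APCP' and accum_hh = '03', the setting below
+# renders to
+#
+#   OBS_PCP_COMBINE_OUTPUT_NAME = APCP_03
+#
+# which matches the OBS_VARn_NAME that the GridStat template constructs
+# for accumulated fields.)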
+#
+{%- if (input_field_group in ['APCP', 'ASNOW']) %}
+{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
+{%- else %}
+{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}
+{%- endif %}
+#
+# Accumulation interval available in the input data.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_ACCUMS = 01
+#
+# Accumulation interval to generate in the output file.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_ACCUM = {{accum_hh}}
+#
+# If the output NetCDF file already exists, specify whether or not to
+# skip the call to PcpCombine.
+#
+# In general, relaunching a task in the SRW App should recreate all the
+# output from that task regardless of whether or not that output already
+# exists. This is the case when running the PcpCombine task on forecasts.
+# Thus, for forecasts, we set the skip flag to False. However, it turns
+# out that when running PcpCombine on observations, it is necessary to
+# skip the call to PcpCombine (i.e. NOT recreate output files) because
+# in the SRW App's workflow, more than one cycle may want to create the
+# same output observation file. This can happen if the forecast periods
+# from two or more forecasts overlap, e.g. forecast 1 starts at 00Z of
+# day 1 and forecast 2 starts at 00Z of day 2, and the forecasts are
+# both 36 hours long, so the last 12 hours of forecast 1 overlap with the
+# first 12 hours of forecast 2. In this case, there will be two workflow
+# tasks that will try to create the observation APCP files for those 12
+# hours, and the files will be named exactly the same (because the output
+# naming convention in this conf file is based on valid times). Thus, in
+# order to avoid (1) duplicating work and (2) having two tasks accidentally
+# trying to write to the same file (which will cause at least one task to
+# fail), when running PcpCombine on observations we want to skip the call
+# if the output observation file(s) (for a given forecast hour) already
+# exist. For this reason, we set the skip flag to True for observations
+# but to False for forecasts.
+#
+{%- if FCST_OR_OBS == 'FCST' %}
+# Since this METplus configuration file takes forecast files as inputs,
+# we set this flag to False.
+#
+PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = False
+{%- elif FCST_OR_OBS == 'OBS' %}
+# Since this METplus configuration file takes observation files as inputs,
+# we set this flag to True.
+#
+PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = True
+{%- endif %}

+{%- if FCST_OR_OBS == 'FCST' %}
+#
+# Maximum forecast lead to allow when searching for model data to use in
+# PcpCombine. Default is a very large time (4000 years) so setting this
+# to a valid maximum value can speed up execution time of numerous runs.
+#
+FCST_PCP_COMBINE_MAX_FORECAST = 2d
+#
+# Keep initialization time constant.
+#
+FCST_PCP_COMBINE_CONSTANT_INIT = True
+{%- endif %}

+{%- if FCST_OR_OBS == 'OBS' %}
+#
+# Name to identify observation data in output.
+#
+OBTYPE = CCPA
+{%- endif %}
+#
+# Specify file type of input data.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_DATATYPE = GRIB

+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing input files.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_DIR = {{input_dir}}
+#
+# Directory in which to write output from PcpCombine.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+# +OUTPUT_BASE = {{output_base}} +{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_DIR = {{output_dir}} +# +# Directory for staging data. +# +STAGING_DIR = {{staging_dir}} + +# End of [dir] section and start of [filename_templates] section. +[filename_templates] +# +# Input file name template relative to {{FCST_OR_OBS}}_PCP_COMBINE_INPUT_DIR. +# +{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_TEMPLATE = {{input_fn_template}} +# +# Output file name template relative to {{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_DIR. +# +{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_TEMPLATE = {{output_fn_template}} diff --git a/parm/metplus/PcpCombine_fcst_APCP.conf b/parm/metplus/PcpCombine_fcst_APCP.conf deleted file mode 100644 index 64fe0b4fcf..0000000000 --- a/parm/metplus/PcpCombine_fcst_APCP.conf +++ /dev/null @@ -1,130 +0,0 @@ -# PcpCombine METplus Configuration for Forecasts - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PcpCombine - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} - -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {FCST_PCP_COMBINE_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Run PcpCombine on forecast data but not observation (observation input -# files are not provided). -# -OBS_PCP_COMBINE_RUN = False -FCST_PCP_COMBINE_RUN = True -# -# Mode of PcpCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM). -# -FCST_PCP_COMBINE_METHOD = ADD -# -# Specify how to name the array in the NetCDF file that PcpCombine -# generates. -# -FCST_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -# -# Accumulation interval available in the forecast input data. -# -FCST_PCP_COMBINE_INPUT_ACCUMS = 01 -# -# Accumulation interval to generate in the output file. -# -FCST_PCP_COMBINE_OUTPUT_ACCUM = {{accum_hh}} -# -# If the "bucket" output NetCDF file already exists, DON'T skip the call -# to PcpCombine. 
-# -# In general, we want to recreate the files when the SRW App workflow -# task that uses this METplus configuration file is relaunched. -# -PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = False - -# Maximum forecast lead to allow when searching for model data to use in -# PcpCombine. Default is a very large time (4000 years) so setting this -# to a valid maximum value can speed up execution time of numerous runs. -FCST_PCP_COMBINE_MAX_FORECAST = 2d - -# Keep initialization time constant. -FCST_PCP_COMBINE_CONSTANT_INIT = True - -FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing forecast input to PcpCombine. -# -FCST_PCP_COMBINE_INPUT_DIR = {{fcst_input_dir}} -# -# Directory in which to write output from PcpCombine. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -FCST_PCP_COMBINE_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for forecast input to PcpCombine relative to -# FCST_PCP_COMBINE_INPUT_DIR. -# -FCST_PCP_COMBINE_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PcpCombine relative to -# FCST_PCP_COMBINE_OUTPUT_DIR. -# -FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {{output_fn_template}} diff --git a/parm/metplus/PcpCombine_fcst_ASNOW.conf b/parm/metplus/PcpCombine_fcst_ASNOW.conf deleted file mode 100644 index 91a6a70abb..0000000000 --- a/parm/metplus/PcpCombine_fcst_ASNOW.conf +++ /dev/null @@ -1,141 +0,0 @@ -# PcpCombine METplus Configuration for Forecasts - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PcpCombine - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} - -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. 
-# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {FCST_PCP_COMBINE_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Run PcpCombine on forecast data but not observation (observation input -# files are not provided). -# -OBS_PCP_COMBINE_RUN = False -FCST_PCP_COMBINE_RUN = True -# -# Mode of PcpCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM). -# -FCST_PCP_COMBINE_METHOD = ADD -# -# Specify name of variable for Snowfall Accumulation. -# NOTE: Currently TSNOWP is used which is a constant-density estimate of snowfall accumulation. -# In future RRFS development, a GSL product with variable-density snowfall accumulation -# is planned for UPP. When that is included and turned on in post, this variable may be changed -# to ASNOW. -# -FCST_PCP_COMBINE_INPUT_NAMES=TSNOWP - -FCST_PCP_COMBINE_INPUT_LEVELS = A01 -# -# Specify how to name the array in the NetCDF file that PcpCombine -# generates. -# -FCST_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -# -# Accumulation interval available in the forecast input data. -# -FCST_PCP_COMBINE_INPUT_ACCUMS = 01 -# -# Accumulation interval to generate in the output file. -# -FCST_PCP_COMBINE_OUTPUT_ACCUM = {{accum_hh}} - -# If the "bucket" output NetCDF file already exists, DON'T skip the call -# to PcpCombine. -# -# In general, we want to recreate the files when the SRW App workflow -# task that uses this METplus configuration file is relaunched. -# -PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = False - -# Maximum forecast lead to allow when searching for model data to use in -# PcpCombine. Default is a very large time (4000 years) so setting this -# to a valid maximum value can speed up execution time of numerous runs. -FCST_PCP_COMBINE_MAX_FORECAST = 2d - -# Keep initialization time constant. -FCST_PCP_COMBINE_CONSTANT_INIT = True - -FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB -#FCST_NATIVE_DATA_TYPE = GRIB - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing forecast input to PcpCombine. -# -FCST_PCP_COMBINE_INPUT_DIR = {{fcst_input_dir}} -# -# Directory in which to write output from PcpCombine. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -FCST_PCP_COMBINE_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for forecast input to PcpCombine relative to -# FCST_PCP_COMBINE_INPUT_DIR. -# -FCST_PCP_COMBINE_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PCPCOMBINE relative to -# FCST_PCP_COMBINE_OUTPUT_DIR. -# -FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {{output_fn_template}} diff --git a/parm/metplus/PcpCombine_obs_APCP.conf b/parm/metplus/PcpCombine_obs_APCP.conf deleted file mode 100644 index cea6809597..0000000000 --- a/parm/metplus/PcpCombine_obs_APCP.conf +++ /dev/null @@ -1,139 +0,0 @@ -# PcpCombine METplus Configuration for Observations - -[config] - -# List of applications (tools) to run. 
-PROCESS_LIST = PcpCombine - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} - -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {OBS_PCP_COMBINE_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Run PcpCombine on observation data but not forecast (forecast input -# files are not provided). -# -OBS_PCP_COMBINE_RUN = True -FCST_PCP_COMBINE_RUN = False -# -# Mode of PcpCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM). -# -OBS_PCP_COMBINE_METHOD = ADD -# -# Specify how to name the array in the NetCDF file that PcpCombine -# generates. -# -OBS_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -# -# Accumulation interval available in the observation input data. -# -OBS_PCP_COMBINE_INPUT_ACCUMS = 01 -# -# Accumulation interval to generate in the output file. -# -OBS_PCP_COMBINE_OUTPUT_ACCUM = {{accum_hh}} -# -# If the "bucket" output NetCDF file already exists, skip the call to -# PcpCombine. -# -# In general, we want to recreate the files when the SRW App workflow -# task that uses this METplus configuration file is relaunched. In this -# case, however, it is necessary to skip the call to PcpCombine because -# in the SRW App's workflow, more than one cycle may want to create the -# same file. This can happen if the forecast periods from two or more -# forecasts overlap, e.g. forecast 1 starts at 00Z of day one and forecast -# 2 starts at 00Z of day 2, and the forecasts are both 36 hours long, so -# the last 12 hours of forecast 1 overlap with the first 12 hours of -# forecast 2. In this case, there will be two workflow tasks that will -# try to create the observation APCP files for those 12 hours, and the -# files will be named exactly the same (because the output naming convention -# in this conf file uses valid times). 
In order to (1) avoid duplicating -# work and (2) having two tasks accidentally trying to write to the same -# file (which will cause at least one task to fail), we do not call -# PcpCombine if the output file (for a given forecast hour) already -# exists. -# -PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = True -# -# Name to identify observation data in output. -# -OBTYPE = CCPA -OBS_PCP_COMBINE_INPUT_DATA_TYPE = GRIB - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PcpCombine. -# -OBS_PCP_COMBINE_INPUT_DIR = {{obs_input_dir}} -# -# Directory in which to write output from PcpCombine. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -OBS_PCP_COMBINE_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PcpCombine relative to -# OBS_PCP_COMBINE_INPUT_DIR. -# -OBS_PCP_COMBINE_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for output from PcpCombine relative to -# OBS_PCP_COMBINE_OUTPUT_DIR. -# -OBS_PCP_COMBINE_OUTPUT_TEMPLATE = {{output_fn_template}} diff --git a/parm/metplus/PointStat_ADPSFC.conf b/parm/metplus/PointStat_ADPSFC.conf deleted file mode 100644 index 6d94e0bed9..0000000000 --- a/parm/metplus/PointStat_ADPSFC.conf +++ /dev/null @@ -1,378 +0,0 @@ -# PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. 
-# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -POINT_STAT_OUTPUT_FLAG_FHO = STAT -POINT_STAT_OUTPUT_FLAG_CTC = STAT -POINT_STAT_OUTPUT_FLAG_CTS = STAT -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -POINT_STAT_OUTPUT_FLAG_CNT = STAT -POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -POINT_STAT_OUTPUT_FLAG_VCNT = STAT -#POINT_STAT_OUTPUT_FLAG_PCT = -#POINT_STAT_OUTPUT_FLAG_PSTD = -#POINT_STAT_OUTPUT_FLAG_PJC = -#POINT_STAT_OUTPUT_FLAG_PRC = -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. Here, -# we store the value of the original lead in this column, i.e. the lead -# with zero corresponding to the actual start time of the forecast (which -# is (cdate - time_lag)), not to cdate. This is just the lead in -# LEAD_SEQ with the time lag (time_lag) of the current forecast member -# added on. -# -# Uncomment this line only after upgrading to METplus 5.x. -#POINT_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}} -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. 
NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. -POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. -POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. -# -# Note on use of set_attr_lead and ensemble member time-lagging: -# ------------------------------------------------------------- -# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS -# specifies the lead to use both in naming of the output .stat and .nc -# files and for setting the lead values contained in those files. This -# option causes MET/METplus to use the lead values in the variable LEAD_SEQ -# set above, which are the same for all ensemble forecast members (i.e. -# regardless of whether members are time lagged with respect to the -# nominal cycle date specified by cdate). If set_attr_lead were not -# specified as below, then MET/METplus would get the lead from the input -# forecast file, and that would in general differ from one ensemble member -# to the next depending on whether the member is time-lagged. That would -# cause confusion, so here, we always use lead values with zero lead -# corresponding to the nominal cdate. -# -FCST_VAR1_NAME = TMP -FCST_VAR1_LEVELS = Z2 -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = Z2 - -FCST_VAR2_NAME = DPT -FCST_VAR2_LEVELS = Z2 -FCST_VAR2_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR2_NAME = DPT -OBS_VAR2_LEVELS = Z2 - -FCST_VAR3_NAME = RH -FCST_VAR3_LEVELS = Z2 -FCST_VAR3_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR3_NAME = RH -OBS_VAR3_LEVELS = Z2 - -FCST_VAR4_NAME = UGRD -FCST_VAR4_LEVELS = Z10 -FCST_VAR4_THRESH = ge2.572 -FCST_VAR4_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR4_NAME = UGRD -OBS_VAR4_LEVELS = Z10 -OBS_VAR4_THRESH = ge2.572 - -FCST_VAR5_NAME = VGRD -FCST_VAR5_LEVELS = Z10 -FCST_VAR5_THRESH = ge2.572 -FCST_VAR5_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR5_NAME = VGRD -OBS_VAR5_LEVELS = Z10 -OBS_VAR5_THRESH = ge2.572 - -FCST_VAR6_NAME = WIND -FCST_VAR6_LEVELS = Z10 -FCST_VAR6_THRESH = ge2.572, ge2.572&<5.144, ge5.144, ge10.288, ge15.433 -FCST_VAR6_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. -OBS_VAR6_NAME = WIND -OBS_VAR6_LEVELS = Z10 -OBS_VAR6_THRESH = ge2.572, ge2.572&<5.144, ge5.144, ge10.288, ge15.433 -OBS_VAR6_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. 
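
As an aside on the set_attr_lead note above: a minimal Python sketch (illustrative only, not part of this patch; the cycle date and the 6-h lag are invented) of why the lead read from a time-lagged member's file differs from the nominal lead that set_attr_lead = "{lead?fmt=%H%M%S}" stamps into the output:

    from datetime import datetime, timedelta

    # Invented example: nominal cycle (cdate) and a member lagged by 6 h.
    cdate = datetime(2024, 2, 26, 0)
    time_lag = timedelta(hours=6)
    valid = cdate + timedelta(hours=3)           # valid time of a 3-h nominal lead

    lead_from_file = valid - (cdate - time_lag)  # lead MET would read from the file: 9 h
    lead_nominal = valid - cdate                 # lead in LEAD_SEQ:                  3 h

    def hhmmss(td):
        """Format a timedelta the way {lead?fmt=%H%M%S} does."""
        total = int(td.total_seconds())
        return f"{total // 3600:02d}{(total % 3600) // 60:02d}{total % 60:02d}"

    print(hhmmss(lead_from_file), hhmmss(lead_nominal))  # -> 090000 030000

With set_attr_lead in place, both lagged and unlagged members record 030000, so their curves line up when plotted.
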
- -FCST_VAR7_NAME = PRMSL -FCST_VAR7_LEVELS = Z0 -FCST_VAR7_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR7_NAME = PRMSL -OBS_VAR7_LEVELS = Z0 - -FCST_VAR8_NAME = TCDC -FCST_VAR8_LEVELS = L0 -FCST_VAR8_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - GRIB_lvl_typ = 200; - GRIB2_ipdtmpl_index=[27]; - GRIB2_ipdtmpl_val=[255]; - interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR8_NAME = TCDC -OBS_VAR8_LEVELS = L0 - -FCST_VAR9_NAME = VIS -FCST_VAR9_LEVELS = L0 -FCST_VAR9_THRESH = lt805, lt1609, lt4828, lt8045, ge8045, lt16090 -FCST_VAR9_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - censor_thresh = [>16090]; - censor_val = [16090]; - interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR9_NAME = VIS -OBS_VAR9_LEVELS = L0 -OBS_VAR9_THRESH = lt805, lt1609, lt4828, lt8045, ge8045, lt16090 -OBS_VAR9_OPTIONS = censor_thresh = [>16090]; - censor_val = [16090]; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR10_NAME = GUST -FCST_VAR10_LEVELS = Z0 -FCST_VAR10_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR10_NAME = GUST -OBS_VAR10_LEVELS = Z0 - -FCST_VAR11_NAME = HGT -FCST_VAR11_LEVELS = L0 -FCST_VAR11_THRESH = lt152, lt305, lt914, lt1520, lt3040, ge914 -FCST_VAR11_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - GRIB_lvl_typ = 215; - desc = "CEILING"; -OBS_VAR11_NAME = CEILING -OBS_VAR11_LEVELS = L0 -OBS_VAR11_THRESH = lt152, lt305, lt914, lt1520, lt3040, ge914 -OBS_VAR11_OPTIONS = GRIB_lvl_typ = 215; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR12_NAME = SPFH -FCST_VAR12_LEVELS = Z2 -OBS_VAR12_NAME = SPFH -OBS_VAR12_LEVELS = Z2 - -FCST_VAR13_NAME = CRAIN -FCST_VAR13_LEVELS = L0 -FCST_VAR13_THRESH = ge1.0 -OBS_VAR13_NAME = PRWE -OBS_VAR13_LEVELS = Z0 -OBS_VAR13_THRESH = ge161&&le163 - -FCST_VAR14_NAME = CSNOW -FCST_VAR14_LEVELS = L0 -FCST_VAR14_THRESH = ge1.0 -OBS_VAR14_NAME = PRWE -OBS_VAR14_LEVELS = Z0 -OBS_VAR14_THRESH = ge171&&le173 - -FCST_VAR15_NAME = CFRZR -FCST_VAR15_LEVELS = L0 -FCST_VAR15_THRESH = ge1.0 -OBS_VAR15_NAME = PRWE -OBS_VAR15_LEVELS = Z0 -OBS_VAR15_THRESH = ge164&&le166 - -FCST_VAR16_NAME = CICEP -FCST_VAR16_LEVELS = L0 -FCST_VAR16_THRESH = ge1.0 -OBS_VAR16_NAME = PRWE -OBS_VAR16_LEVELS = Z0 -OBS_VAR16_THRESH = ge174&&le176 - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. -# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. 
-# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ADPUPA.conf b/parm/metplus/PointStat_ADPUPA.conf deleted file mode 100644 index 519767a51e..0000000000 --- a/parm/metplus/PointStat_ADPUPA.conf +++ /dev/null @@ -1,343 +0,0 @@ -# PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. 
-# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -POINT_STAT_OUTPUT_FLAG_FHO = STAT -POINT_STAT_OUTPUT_FLAG_CTC = STAT -POINT_STAT_OUTPUT_FLAG_CTS = STAT -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -POINT_STAT_OUTPUT_FLAG_CNT = STAT -POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -POINT_STAT_OUTPUT_FLAG_VCNT = STAT -#POINT_STAT_OUTPUT_FLAG_PCT = -#POINT_STAT_OUTPUT_FLAG_PSTD = -#POINT_STAT_OUTPUT_FLAG_PJC = -#POINT_STAT_OUTPUT_FLAG_PRC = -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. Here, -# we store the value of the original lead in this column, i.e. the lead -# with zero corresponding to the actual start time of the forecast (which -# is (cdate - time_lag)), not to cdate. This is just the lead in -# LEAD_SEQ with the time lag (time_lag) of the current forecast member -# added on. -# -# Uncomment this line only after upgrading to METplus 5.x. -#POINT_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}} -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. 
-POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. -POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. -# -# Note on use of set_attr_lead and ensemble member time-lagging: -# ------------------------------------------------------------- -# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS -# specifies the lead to use both in naming of the output .stat and .nc -# files and for setting the lead values contained in those files. This -# option causes MET/METplus to use the lead values in the variable LEAD_SEQ -# set above, which are the same for all ensemble forecast members (i.e. -# regardless of whether members are time lagged with respect to the -# nominal cycle date specified by cdate). If set_attr_lead were not -# specified as below, then MET/METplus would get the lead from the input -# forecast file, and that would in general differ from one ensemble member -# to the next depending on whether the member is time-lagged. That would -# cause confusion, so here, we always use lead values with zero lead -# corresponding to the nominal cdate. -# -FCST_VAR1_NAME = TMP -FCST_VAR1_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 - -FCST_VAR2_NAME = RH -FCST_VAR2_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250 -FCST_VAR2_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR2_NAME = RH -OBS_VAR2_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250 - -FCST_VAR3_NAME = DPT -FCST_VAR3_LEVELS = P1000, P925, P850, P700, P500, P400, P300 -FCST_VAR3_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR3_NAME = DPT -OBS_VAR3_LEVELS = P1000, P925, P850, P700, P500, P400, P300 - -FCST_VAR4_NAME = UGRD -FCST_VAR4_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -FCST_VAR4_THRESH = ge2.572 -FCST_VAR4_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR4_NAME = UGRD -OBS_VAR4_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -OBS_VAR4_THRESH = ge2.572 - -FCST_VAR5_NAME = VGRD -FCST_VAR5_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -FCST_VAR5_THRESH = ge2.572 -FCST_VAR5_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR5_NAME = VGRD -OBS_VAR5_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -OBS_VAR5_THRESH = ge2.572 - -FCST_VAR6_NAME = WIND -FCST_VAR6_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -FCST_VAR6_THRESH = ge2.572, ge2.572&<5.144, ge5.144, ge10.288, ge15.433, ge20.577, ge25.722 -FCST_VAR6_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR6_NAME = WIND -OBS_VAR6_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -OBS_VAR6_THRESH = ge2.572, ge2.572&<5.144, ge5.144, ge10.288, ge15.433, ge20.577, ge25.722 - -FCST_VAR7_NAME = HGT -FCST_VAR7_LEVELS = P1000, P950, P925, P850, 
P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -FCST_VAR7_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR7_NAME = HGT -OBS_VAR7_LEVELS = P1000, P950, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 - -FCST_VAR8_NAME = SPFH -FCST_VAR8_LEVELS = P1000, P850, P700, P500, P400, P300 -FCST_VAR8_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR8_NAME = SPFH -OBS_VAR8_LEVELS = P1000, P850, P700, P500, P400, P300 - -FCST_VAR9_NAME = CAPE -FCST_VAR9_LEVELS = L0 -FCST_VAR9_THRESH = gt500, gt1000, gt1500, gt2000, gt3000, gt4000 -FCST_VAR9_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - cnt_thresh = [ >0 ]; -OBS_VAR9_NAME = CAPE -OBS_VAR9_LEVELS = L0-100000 -OBS_VAR9_THRESH = gt500, gt1000, gt1500, gt2000, gt3000, gt4000 -OBS_VAR9_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR10_NAME = HPBL -FCST_VAR10_LEVELS = Z0 -FCST_VAR10_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR10_NAME = PBL -OBS_VAR10_LEVELS = L0 -OBS_VAR10_OPTIONS = desc = "TKE"; - -FCST_VAR11_NAME = HGT -FCST_VAR11_LEVELS = L0 -FCST_VAR11_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - GRIB_lvl_typ = 220; -OBS_VAR11_NAME = PBL -OBS_VAR11_LEVELS = L0 -OBS_VAR11_OPTIONS = desc = "RI"; - -FCST_VAR12_NAME = CAPE -FCST_VAR12_LEVELS = L0-90 -FCST_VAR12_THRESH = gt500, gt1000, gt1500, gt2000, gt3000, gt4000 -FCST_VAR12_OPTIONS = cnt_thresh = [ >0 ]; -OBS_VAR12_NAME = MLCAPE -OBS_VAR12_LEVELS = L0 -OBS_VAR12_THRESH = gt500, gt1000, gt1500, gt2000, gt3000, gt4000 -OBS_VAR12_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. -# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. -# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. 
-# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensmean.conf b/parm/metplus/PointStat_ensmean.conf new file mode 100644 index 0000000000..67a20034df --- /dev/null +++ b/parm/metplus/PointStat_ensmean.conf @@ -0,0 +1,566 @@ +# Ensemble mean {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{fhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. +# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. 
+# +{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped + +{{METPLUS_TOOL_NAME}}_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA +#{{METPLUS_TOOL_NAME}}_OBS_QUALITY_EXC = + +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD = + +#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = +{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = BILIN +{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 2 + +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECNT = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RPS = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MPR = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ORANK = + +{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True + +#{{METPLUS_TOOL_NAME}}_HSS_EC_VALUE = + +# +# Observation data time window(s). +# +OBS_WINDOW_BEGIN = -1799 +OBS_WINDOW_END = 1800 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = {OBS_WINDOW_END} + +# Optional list of offsets to look for point observation data +{{METPLUS_TOOL_NAME}}_OFFSETS = 0 +# +# Name to identify model (forecast) data in output. +# +# The variable MODEL is recorded in the stat files, and the data in +# these files is then plotted (e.g. using METViewer). Here, we add a +# suffix to MODEL that identifies the data as that for the ensemble +# mean. This makes it easier to identify each curve. +# +MODEL = {{vx_fcst_model_name}}_ensmean +# +# Name to identify observation data in output. +# +OBTYPE = {{obtype}} +# +# Value to enter under the DESC column in the output stat file. +# +{{METPLUS_TOOL_NAME}}_DESC = NA + +# Regrid to specified grid. Indicate NONE if no regridding, or the grid id +# (e.g. G212) +{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE +{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BILIN +{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2 + +{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} + +# sets the -obs_valid_beg command line argument (optional) +# not used for this example +#{{METPLUS_TOOL_NAME}}_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} + +# sets the -obs_valid_end command line argument (optional) +# not used for this example +#{{METPLUS_TOOL_NAME}}_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} + +# Verification Masking regions +# Indicate which grid and polygon masking region, if applicable +{{METPLUS_TOOL_NAME}}_GRID = + +# List of full path to poly masking files. NOTE: Only short lists of poly +# files work (those that fit on one line), a long list will result in an +# environment variable that is too long, resulting in an error. For long +# lists of poly masking files (i.e. all the mask files in the NCEP_mask +# directory), define these in the METplus {{MetplusToolName}} configuration file. 
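
The warning above about long poly lists can be reproduced outside METplus. Here is a hypothetical Python sketch (the variable name and paths are invented) of the underlying failure on Linux, where a single environment string is capped at MAX_ARG_STRLEN (32 pages, typically 128 KiB):

    import os
    import subprocess

    # Build an oversized value, as a list of thousands of poly masks would.
    env = dict(os.environ)
    env["METPLUS_POLY_TEST"] = ",".join(
        f"/path/to/NCEP_mask/region_{i:04d}.poly" for i in range(10000)
    )

    try:
        subprocess.run(["/bin/true"], env=env, check=True)
    except OSError as err:
        # Exceeding the per-string limit makes the exec of the child fail.
        print(err)  # e.g. [Errno 7] Argument list too long
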
+{{METPLUS_TOOL_NAME}}_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+{{METPLUS_TOOL_NAME}}_STATION_ID =
+
+# Message types, if all message types are to be returned, leave this empty,
+# otherwise indicate the message types of interest.
+{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
+
+# set to True to run {{MetplusToolName}} once for each name/level combination
+# set to False to run {{MetplusToolName}} once per run time including all fields
+{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False
+#
+# List of forecast and corresponding observation fields to process.
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Set the probabilistic threshold to be used for the forecast field. If
+necessary, this can be changed to be an input parameter in the calling
+script instead of a hard-coded value as below.
+#}
+{%- set thresh_fcst_prob = '==0.1' %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja
+scoping rules). Define such variables.
+#}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set indx_level_fcst = '' %}
+
+{%- set valid_threshes_fcst = [] %}
+{%- set valid_threshes_obs = [] %}
+{%- set threshes_fcst = '' %}
+{%- set threshes_obs = '' %}
+{%- set indx_input_thresh_fcst = '' %}
+
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+{%- set tmp = '' %}
+{%- set error_msg = '' %}
+
+{#-
+Make sure that the set of field groups for forecasts and observations
+are identical.
+#}
+{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
+{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
+{%- if (fgs_fcst != fgs_obs) %}
+  {%- set error_msg = '\n' ~
+'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
+'to that for observations (fgs_obs) but isn\'t:\n' ~
+'  fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
+'  fgs_obs = ' ~ fgs_obs %}
+  {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- endif %}
+
+{#-
+Extract the lists of forecast and observation dictionaries containing
+the valid fields, levels, and thresholds corresponding to the specified
+field group (input_field_group). Note that it would be simpler to have
+these be just dictionaries in which the keys are the field names (instead
+of them being LISTS of dictionaries in which each dictionary contains a
+single key that is the field name), but that approach cannot be used here
+because it is possible for field names to be repeated (for both forecasts
+and observations). For example, in the observations, the field name
+'PRWE' appears more than once, each time with a different threshold, and
+the combination of name and threshold is what constitutes a unique field,
+not just the name by itself.
+#}
+{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
+{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+
+{#-
+Reset the specified forecast level so that if it happens to be an
+accumulation (e.g. 'A03'), the leading zeros in front of the hour are
+stripped out (e.g. reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Ensure that the specified input forecast level(s) (input_level_fcst) and
+threshold(s) (input_thresh_fcst) are valid, i.e.
that they are in the
+set(s) of valid forecast levels and thresholds, respectively, specified
+in fields_levels_threshes_fcst.
+#}
+{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
+{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+
+{#-
+Some fields in the specified field group (input_field_group) may need to
+be excluded from the METplus config file because calculating means for
+them doesn't make sense. List these (for each input_field_group) in the
+following dictionary.
+#}
+{%- set fields_fcst_to_exclude_by_field_group =
+    {'APCP': [],
+     'ASNOW': [],
+     'REFC': [],
+     'RETOP': [],
+     'ADPSFC': ['TCDC', 'VIS', 'HGT'],
+     'ADPUPA': []} %}
+{%- set fields_fcst_to_exclude = fields_fcst_to_exclude_by_field_group[input_field_group] %}
+
+{#-
+For convenience, create lists of valid forecast and observation field
+names.
+#}
+{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
+{%- set valid_fields_fcst = [] %}
+{%- for i in range(0,num_valid_fields_fcst) %}
+  {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
+  {%- set tmp = valid_fields_fcst.append(field) %}
+{%- endfor %}
+
+{%- set valid_fields_obs = [] %}
+{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
+{%- for i in range(0,num_valid_fields_obs) %}
+  {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
+  {%- set tmp = valid_fields_obs.append(field) %}
+{%- endfor %}
+
+{#-
+Ensure that the number of valid fields for forecasts is equal to that
+for the observations.
+#}
+{%- set num_valid_fields = 0 %}
+{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
+  {%- set error_msg = '\n' ~
+'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
+'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
+'but isn\'t:\n' ~
+'  num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
+'  num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
+'The lists of valid forecast and observation fields are:\n' ~
+'  valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
+'  valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
+  {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- else %}
+  {%- set num_valid_fields = num_valid_fields_fcst %}
+{%- endif %}
+
+{#-
+Loop over the valid fields and set field names, levels, thresholds, and/
+or options for each field, both for forecasts and for observations, in
+the METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+
+{%- for i in range(0,num_valid_fields) if valid_fields_fcst[i] not in fields_fcst_to_exclude %}
+
+  {%- set field_fcst = valid_fields_fcst[i] %}
+  {%- set field_obs = valid_fields_obs[i] %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field. Then check that the number of valid levels for
+forecasts is the same as that for observations.
+#}
+  {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
+  {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
+
+{#-
+Extract dictionary of valid forecast levels (the dictionary keys) and
+corresponding lists of valid thresholds (the values) for each level.
+Then loop over these levels and corresponding lists of thresholds to set
+both the forecast and observation field names, levels, thresholds, and/or
+options.
+#} + {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %} + {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %} + + {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} +{#- +Increment the METplus variable counter. +#} + {%- set ns.var_count = ns.var_count+1 %} + +{#- +Set forecast field name. +#} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{level_fcst}}_ENS_MEAN + +{#- +Set forecast field level. +#} +FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}} + +{#- +Set forecast field threshold(s). Note that no forecast thresholds are +included in the METplus configuration file if input_thresh_fcst is set +to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_thresh_fcst is set to 'all', set the list of forecast thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_fcst = valid_threshes_fcst %} +{#- +If input_thresh_fcst is set to a specific value: + 1) Ensure that input_thresh_fcst exists in the list of valid forecast + thresholds. + 2) Get the index of input_thresh_fcst in the list of valid forecast + thresholds. This will be needed later below when setting the + observation threshold(s). + 3) Use this index to set the forecast threshold to a one-element list + containing the specified forecast threshold. +#} + {%- else %} + + {%- if input_thresh_fcst not in valid_threshes_fcst %} + {%- set error_msg = '\n' ~ +'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~ +'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~ +'of valid forecast thresholds (valid_threshes_fcst):\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' level_fcst = ' ~ level_fcst ~ '\n' ~ +' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~ +' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} + {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} + + {%- endif %} +{#- +If threshes_fcst has been reset to something other than its default +value of an empty list, then set the forecast thresholds in the METplus +configuration file because that implies threshes_fcst was set above to +a non-empty value. Then reset threshes_fcst to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_fcst != []) %} +FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}} + {%- endif %} + {%- set threshes_fcst = [] %} + + {%- endif %} + +{#- +Set forecast field options. +#} + {%- set opts_indent_len = 20 %} + {%- if (ns.var_count > 9) and (ns.var_count <= 99) %} + {%- set opts_indent_len = opts_indent_len + 1 %} + {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %} + {%- set opts_indent_len = opts_indent_len + 2 %} + {%- elif (ns.var_count > 999) %} + {%- set opts_indent_len = opts_indent_len + 3 %} + {%- endif %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'ADPUPA' %} + + {%- if field_fcst == 'CAPE' %} +FCST_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; + {%- endif %} + + {%- endif %} + +{#- +Set observation field name. +#} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}} + +{#- +Set observation field level. 
+#} + {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %} + {%- set level_obs = valid_levels_obs[indx_level_fcst] %} +OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}} + +{#- +Set observation field threshold(s). Note that no observation thresholds +are included in the METplus configuration file if input_thresh_fcst is +set to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +Set the list of valid observation thresholds to the one corresponding to +the current observation level (level_obs). +#} + {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %} +{#- +If input_thresh_fcst is set to 'all', set the list of observation thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_obs = valid_threshes_obs %} +{#- +If input_thresh_fcst is set to a specific forecast threshold, then the +observation threshold is given by the element in the list of valid +observation thresholds that has the same index as that of input_thresh_fcst +in the list of valid forecast thresholds. +#} + {%- else %} + + {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %} + + {%- endif %} +{#- +If threshes_obs has been reset to something other than its default value +of an empty list, then set the observation thresholds in the METplus +configuration file because that implies threshes_obs was set above to +a non-empty value. Then reset threshes_obs to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_obs != []) %} +OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}} + {%- endif %} + {%- set threshes_obs = [] %} + + {%- endif %} + +{#- +Set observation field options. +#} + {%- set opts_indent_len = opts_indent_len - 1 %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'ADPUPA' %} + + {%- if field_obs == 'CAPE' %} +OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; +{{opts_indent}}cnt_logic = UNION; + {%- elif field_obs == 'PBL' %} +OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE"; + {%- endif %} + + {%- endif %} + +{#- +Print out a newline to separate the settings for the current field (both +forecast and observation settings) from those for the next field. +#} + {{- '\n' }} + + {%- endif %} + + {%- endfor %} +{%- endfor %} +# End of [config] section and start of [dir] section. +[dir] +# +# Directory containing observation input to {{MetplusToolName}}. +# +OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}} +# +# Directory containing forecast input to {{MetplusToolName}}. +# +FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}} +# +# Directory containing climatology mean input to {{MetplusToolName}}. Not used in +# this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR = +# +# Directory containing climatology mean input to {{MetplusToolName}}. Not used in +# this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR = +# +# Directory in which to write output from {{MetplusToolName}}. +# +# OUTPUT_BASE apparently has to be set to something; it cannot be left +# to its default value. But it is not explicitly used elsewhere in this +# configuration file. +# +OUTPUT_BASE = {{output_base}} +{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}} +# +# Directory for staging data. +# +STAGING_DIR = {{staging_dir}} + +# End of [dir] section and start of [filename_templates] section. +[filename_templates] +# +# Template for observation input to {{MetplusToolName}} relative to +# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR. 
+# +OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}} +# +# Template for forecast input to {{MetplusToolName}} relative to +# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR. +# +FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}} +# +# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR. +# +{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE = +# +# Template for climatology input to {{MetplusToolName}} relative to +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE = +# +# Template for climatology input to {{MetplusToolName}} relative to +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensmean_ADPSFC.conf b/parm/metplus/PointStat_ensmean_ADPSFC.conf deleted file mode 100644 index 6b7e7e9cff..0000000000 --- a/parm/metplus/PointStat_ensmean_ADPSFC.conf +++ /dev/null @@ -1,252 +0,0 @@ -# Ensemble mean PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. 
-# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -#POINT_STAT_OUTPUT_FLAG_FHO = -#POINT_STAT_OUTPUT_FLAG_CTC = -#POINT_STAT_OUTPUT_FLAG_CTS = -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -POINT_STAT_OUTPUT_FLAG_CNT = STAT -POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -POINT_STAT_OUTPUT_FLAG_VCNT = STAT -#POINT_STAT_OUTPUT_FLAG_PCT = -#POINT_STAT_OUTPUT_FLAG_PSTD = -#POINT_STAT_OUTPUT_FLAG_PJC = -#POINT_STAT_OUTPUT_FLAG_PRC = -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. -POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. 
-POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. -# -FCST_VAR1_NAME = TMP_Z2_ENS_MEAN -FCST_VAR1_LEVELS = Z2 -FCST_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = Z2 -OBS_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303 - -FCST_VAR2_NAME = DPT_Z2_ENS_MEAN -FCST_VAR2_LEVELS = Z2 -FCST_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298 -OBS_VAR2_NAME = DPT -OBS_VAR2_LEVELS = Z2 -OBS_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298 - -FCST_VAR3_NAME = WIND_Z10_ENS_MEAN -FCST_VAR3_LEVELS = Z10 -FCST_VAR3_THRESH = ge5, ge10, ge15 -OBS_VAR3_NAME = WIND -OBS_VAR3_LEVELS = Z10 -OBS_VAR3_THRESH = ge5, ge10, ge15 - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. -# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. -# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensmean_ADPUPA.conf b/parm/metplus/PointStat_ensmean_ADPUPA.conf deleted file mode 100644 index b54c775b46..0000000000 --- a/parm/metplus/PointStat_ensmean_ADPUPA.conf +++ /dev/null @@ -1,319 +0,0 @@ -# Ensemble mean PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. 
-# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -#POINT_STAT_OUTPUT_FLAG_FHO = -#POINT_STAT_OUTPUT_FLAG_CTC = -#POINT_STAT_OUTPUT_FLAG_CTS = -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -POINT_STAT_OUTPUT_FLAG_CNT = STAT -POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -POINT_STAT_OUTPUT_FLAG_VCNT = STAT -#POINT_STAT_OUTPUT_FLAG_PCT = -#POINT_STAT_OUTPUT_FLAG_PSTD = -#POINT_STAT_OUTPUT_FLAG_PJC = -#POINT_STAT_OUTPUT_FLAG_PRC = -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. 
-# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. -POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. -POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. 
-# -FCST_VAR1_NAME = TMP_P850_ENS_MEAN -FCST_VAR1_LEVELS = P850 -FCST_VAR1_THRESH = ge288, ge293, ge298 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = P850 -OBS_VAR1_THRESH = ge288, ge293, ge298 - -FCST_VAR2_NAME = TMP_P700_ENS_MEAN -FCST_VAR2_LEVELS = P700 -FCST_VAR2_THRESH = ge273, ge278, ge283 -OBS_VAR2_NAME = TMP -OBS_VAR2_LEVELS = P700 -OBS_VAR2_THRESH = ge273, ge278, ge283 - -FCST_VAR3_NAME = TMP_P500_ENS_MEAN -FCST_VAR3_LEVELS = P500 -FCST_VAR3_THRESH = ge258, ge263, ge268 -OBS_VAR3_NAME = TMP -OBS_VAR3_LEVELS = P500 -OBS_VAR3_THRESH = ge258, ge263, ge268 - -FCST_VAR4_NAME = DPT_P850_ENS_MEAN -FCST_VAR4_LEVELS = P850 -FCST_VAR4_THRESH = ge273, ge278, ge283 -OBS_VAR4_NAME = DPT -OBS_VAR4_LEVELS = P850 -OBS_VAR4_THRESH = ge273, ge278, ge283 - -FCST_VAR5_NAME = DPT_P700_ENS_MEAN -FCST_VAR5_LEVELS = P700 -FCST_VAR5_THRESH = ge263, ge286, ge273 -OBS_VAR5_NAME = DPT -OBS_VAR5_LEVELS = P700 -OBS_VAR5_THRESH = ge263, ge286, ge273 - -FCST_VAR6_NAME = WIND_P850_ENS_MEAN -FCST_VAR6_LEVELS = P850 -FCST_VAR6_THRESH = ge5, ge10, ge15 -OBS_VAR6_NAME = WIND -OBS_VAR6_LEVELS = P850 -OBS_VAR6_THRESH = ge5, ge10, ge15 - -FCST_VAR7_NAME = WIND_P700_ENS_MEAN -FCST_VAR7_LEVELS = P700 -FCST_VAR7_THRESH = ge10, ge15, ge20 -OBS_VAR7_NAME = WIND -OBS_VAR7_LEVELS = P700 -OBS_VAR7_THRESH = ge10, ge15, ge20 - -FCST_VAR8_NAME = WIND_P500_ENS_MEAN -FCST_VAR8_LEVELS = P500 -FCST_VAR8_THRESH = ge15, ge21, ge26 -OBS_VAR8_NAME = WIND -OBS_VAR8_LEVELS = P500 -OBS_VAR8_THRESH = ge15, ge21, ge26 - -FCST_VAR9_NAME = WIND_P250_ENS_MEAN -FCST_VAR9_LEVELS = P250 -FCST_VAR9_THRESH = ge26, ge31, ge46, ge62 -OBS_VAR9_NAME = WIND -OBS_VAR9_LEVELS = P250 -OBS_VAR9_THRESH = ge26, ge31, ge46, ge62 - -FCST_VAR10_NAME = HGT_P500_ENS_MEAN -FCST_VAR10_LEVELS = P500 -FCST_VAR10_THRESH = ge5400, ge5600, ge5880 -OBS_VAR10_NAME = HGT -OBS_VAR10_LEVELS = P500 -OBS_VAR10_THRESH = ge5400, ge5600, ge5880 - -FCST_VAR11_NAME = CAPE_L0_ENS_MEAN -FCST_VAR11_LEVELS = L0 -FCST_VAR11_THRESH = le1000, gt1000&<2500, gt2500&<4000, gt2500 -FCST_VAR11_OPTIONS = cnt_thresh = [ >0 ]; -OBS_VAR11_NAME = CAPE -OBS_VAR11_LEVELS = L0-100000 -OBS_VAR11_THRESH = le1000, gt1000&<2500, gt2500&<4000, gt2500 -OBS_VAR11_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR12_NAME = HPBL_Z0_ENS_MEAN -FCST_VAR12_LEVELS = Z0 -FCST_VAR12_THRESH = lt500, lt1500, gt1500 -OBS_VAR12_NAME = PBL -OBS_VAR12_LEVELS = L0 -OBS_VAR12_THRESH = lt500, lt1500, gt1500 -OBS_VAR12_OPTIONS = desc = "TKE"; - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. 
-# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. -# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensprob.conf b/parm/metplus/PointStat_ensprob.conf new file mode 100644 index 0000000000..69ef9fd5db --- /dev/null +++ b/parm/metplus/PointStat_ensprob.conf @@ -0,0 +1,524 @@ +# Ensemble probabilistic {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{fhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. +# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. 
+#
+{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped
+
+{{METPLUS_TOOL_NAME}}_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA
+#{{METPLUS_TOOL_NAME}}_OBS_QUALITY_EXC =
+
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD =
+
+#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH =
+#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE =
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = BILIN
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 2
+
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT =
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECNT =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RPS =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MPR =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ORANK =
+
+{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True
+
+#{{METPLUS_TOOL_NAME}}_HSS_EC_VALUE =
+
+#
+# Observation data time window(s).
+#
+OBS_WINDOW_BEGIN = -1799
+OBS_WINDOW_END = 1800
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = {OBS_WINDOW_END}
+
+# Optional list of offsets to look for point observation data
+{{METPLUS_TOOL_NAME}}_OFFSETS = 0
+#
+# Name to identify model (forecast) data in output.
+#
+# The variable MODEL is recorded in the stat files, and the data in
+# these files is then plotted (e.g. using METViewer). Here, we add a
+# suffix to MODEL that identifies the data as ensemble-probabilistic.
+# This makes it easier to identify each curve.
+#
+MODEL = {{vx_fcst_model_name}}_ensprob
+#
+# Name to identify observation data in output.
+#
+OBTYPE = {{obtype}}
+#
+# Value to enter under the DESC column in the output stat file.
+#
+{{METPLUS_TOOL_NAME}}_DESC = NA
+
+# Regrid to specified grid. Indicate NONE if no regridding, or the grid id
+# (e.g. G212)
+{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE
+{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BILIN
+{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2
+
+{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
+
+# sets the -obs_valid_beg command line argument (optional)
+# not used for this example
+#{{METPLUS_TOOL_NAME}}_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H}
+
+# sets the -obs_valid_end command line argument (optional)
+# not used for this example
+#{{METPLUS_TOOL_NAME}}_OBS_VALID_END = {valid?fmt=%Y%m%d_%H}
+
+# Verification Masking regions
+# Indicate which grid and polygon masking region, if applicable
+{{METPLUS_TOOL_NAME}}_GRID =
+
+# List of full paths to poly masking files. NOTE: Only short lists of poly
+# files work (those that fit on one line); a long list will result in an
+# environment variable that is too long, resulting in an error. For long
+# lists of poly masking files (i.e. all the mask files in the NCEP_mask
+# directory), define these in the METplus {{MetplusToolName}} configuration file.
+{{METPLUS_TOOL_NAME}}_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+{{METPLUS_TOOL_NAME}}_STATION_ID =
+
+# Message types, if all message types are to be returned, leave this empty,
+# otherwise indicate the message types of interest.
+{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
+
+# set to True to run {{MetplusToolName}} once for each name/level combination
+# set to False to run {{MetplusToolName}} once per run time including all fields
+{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False
+#
+# List of forecast and corresponding observation fields to process.
+# Note that the forecast variable name must exactly match the name of a
+# variable in the forecast input file(s).
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Set the probabilistic threshold to be used for the forecast field. If
+necessary, this can be changed to be an input parameter in the calling
+script instead of a hard-coded value as below.
+#}
+{%- set thresh_fcst_prob = '==0.1' %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja
+scoping rules). Define such variables.
+#}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set indx_level_fcst = '' %}
+
+{%- set valid_threshes_fcst = [] %}
+{%- set valid_threshes_obs = [] %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+{%- set indx_thresh_fcst = '' %}
+{%- set thresh_fcst_and_or = '' %}
+
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+{%- set tmp = '' %}
+{%- set error_msg = '' %}
+
+{#-
+Make sure that the sets of field groups for forecasts and observations
+are identical.
+#}
+{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
+{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
+{%- if (fgs_fcst != fgs_obs) %}
+  {%- set error_msg = '\n' ~
+'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
+'to that for observations (fgs_obs) but isn\'t:\n' ~
+'  fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
+'  fgs_obs = ' ~ fgs_obs %}
+  {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- endif %}
+
+{#-
+Extract the lists of forecast and observation dictionaries containing
+the valid fields, levels, and thresholds corresponding to the specified
+field group (input_field_group). Note that it would be simpler to have
+these be just dictionaries in which the keys are the field names (instead
+of them being LISTS of dictionaries in which each dictionary contains a
+single key that is the field name), but that approach cannot be used here
+because it is possible for field names to be repeated (for both forecasts
+and observations). For example, in the observations, the field name
+'PRWE' appears more than once, each time with a different threshold, and
+the combination of name and threshold is what constitutes a unique field,
+not just the name by itself.
+#}
+{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
+{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+
+{#-
+Reset the specified forecast level so that if it happens to be an
+accumulation (e.g. 'A03'), the leading zeros in front of the hour are
+stripped out (e.g. reset to 'A3').
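The zero-pad stripping can be pictured with a small Python sketch (an
illustrative stand-in for the get_accumulation_no_zero_pad macro defined in
metplus_macros.jinja; the isdigit guard here is an added safety assumption,
since the macro itself relies on jinja's int filter):

def get_accumulation_no_zero_pad(level):
    # 'A' followed by an accumulation period in hours, e.g. 'A03' -> 'A3'.
    if level.startswith('A') and level[1:].isdigit():
        return 'A%d' % int(level[1:])
    # Non-accumulation levels (e.g. 'P850', 'Z2') pass through unchanged.
    return level

assert get_accumulation_no_zero_pad('A03') == 'A3'
assert get_accumulation_no_zero_pad('P850') == 'P850'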
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Ensure that the specified input forecast level(s) (input_level_fcst) and
+threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
+set(s) of valid forecast levels and thresholds, respectively, specified
+in fields_levels_threshes_fcst.
+#}
+{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
+{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+
+{#-
+For convenience, create lists of valid forecast and observation field
+names.
+#}
+{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
+{%- set valid_fields_fcst = [] %}
+{%- for i in range(0,num_valid_fields_fcst) %}
+  {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
+  {%- set tmp = valid_fields_fcst.append(field) %}
+{%- endfor %}
+
+{%- set valid_fields_obs = [] %}
+{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
+{%- for i in range(0,num_valid_fields_obs) %}
+  {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
+  {%- set tmp = valid_fields_obs.append(field) %}
+{%- endfor %}
+
+{#-
+Ensure that the number of valid fields for forecasts is equal to that
+for the observations.
+#}
+{%- set num_valid_fields = 0 %}
+{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
+  {%- set error_msg = '\n' ~
+'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
+'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
+'but isn\'t:\n' ~
+'  num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
+'  num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
+'The lists of valid forecast and observation fields are:\n' ~
+'  valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
+'  valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
+  {{metplus_macros.print_err_and_quit(error_msg)}}
+{%- else %}
+  {%- set num_valid_fields = num_valid_fields_fcst %}
+{%- endif %}
+
+{#-
+Loop over the valid fields and set field names, levels, thresholds, and/
+or options for each field, both for forecasts and for observations, in
+the METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+
+{#-
+This outer for-loop is included to make this code as similar as possible
+to the one in GridStat_ensprob.conf. There, treat_fcst_as_prob takes on
+both True and False values, although here it only takes on the value
+True (which makes the loop redundant). It is not clear why it doesn't
+need to be set to False. This is being investigated (12/13/2023).
+#}
+{%- for treat_fcst_as_prob in [True] %}
+
+  {%- for i in range(0,num_valid_fields) %}
+
+    {%- set field_fcst = valid_fields_fcst[i] %}
+    {%- set field_obs = valid_fields_obs[i] %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field. Then check that the number of valid levels for
+forecasts is the same as that for observations.
+#}
+    {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %}
+    {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %}
+
+{#-
+Extract dictionary of valid forecast levels (the dictionary keys) and
+corresponding lists of valid thresholds (the values) for each level.
+Then loop over these levels and corresponding lists of thresholds to set
+both the forecast and observation field names, levels, thresholds, and/or
+options.
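To make the loop above concrete, here is a rough Python picture of the data
shape being walked: a list of single-key dictionaries (so a field name such
as 'PRWE' can legitimately appear more than once), with each level mapping to
a threshold list. The sample values are taken from vx_config_ens.yaml; the
walking code itself is only a sketch:

fields_levels_threshes_fcst = [
    {'TMP':  {'P850': ['ge288', 'ge293', 'ge298'],
              'P700': ['ge273', 'ge278', 'ge283']}},
    {'WIND': {'P850': ['ge5', 'ge10', 'ge15']}},
]

for entry in fields_levels_threshes_fcst:
    (field,) = entry.keys()   # each dict carries exactly one field name
    for level, threshes in entry[field].items():
        print(field, level, threshes)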
+#}
+    {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
+    {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+
+      {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
+
+        {%- for thresh_fcst in valid_threshes_fcst %}
+
+          {%- if (input_thresh_fcst == 'all') or (input_thresh_fcst == thresh_fcst) %}
+{#-
+Increment the METplus variable counter.
+#}
+            {%- set ns.var_count = ns.var_count+1 %}
+
+{#-
+Set forecast field name.
+#}
+            {%- set thresh_fcst_and_or = thresh_fcst|replace("&&", ".and.") %}
+            {%- set thresh_fcst_and_or = thresh_fcst_and_or|replace("||", ".or.") %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{level_fcst}}_ENS_FREQ_{{thresh_fcst_and_or}}
+
+{#-
+Set forecast field level.
+#}
+FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}}
+
+{#-
+Set forecast field threshold.
+Note that since the forecast field being read in is actually a field of
+probabilities, we set the forecast threshold to a probabilistic one
+(thresh_fcst_prob) and not to the physical threshold (thresh_fcst) in
+the dictionary of forecast field names, levels, and thresholds that we
+are looping over.
+#}
+FCST_VAR{{ns.var_count}}_THRESH = {{thresh_fcst_prob}}
+
+{#-
+Set forecast field options.
+#}
+            {%- set opts_indent_len = 20 %}
+            {%- if (ns.var_count > 9) and (ns.var_count <= 99) %}
+              {%- set opts_indent_len = opts_indent_len + 1 %}
+            {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %}
+              {%- set opts_indent_len = opts_indent_len + 2 %}
+            {%- elif (ns.var_count > 999) %}
+              {%- set opts_indent_len = opts_indent_len + 3 %}
+            {%- endif %}
+            {%- set opts_indent = ' '*opts_indent_len %}
+
+            {%- if input_field_group == 'ADPSFC' %}
+
+              {%- if field_fcst == 'HGT' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = desc = "CEILING";
+              {%- elif field_fcst == 'VIS' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
+              {%- endif %}
+
+            {%- endif %}
+
+{#-
+Set observation field name.
+#}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}
+
+{#-
+Set observation field level.
+#}
+            {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %}
+            {%- set level_obs = valid_levels_obs[indx_level_fcst] %}
+OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}}
+
+{#-
+Set observation field threshold. Note that no observation thresholds
+are included in the METplus configuration file if input_thresh_fcst is
+set to 'none'.
+#}
+            {%- if (input_thresh_fcst != 'none') %}
+{#-
+Set the list of valid observation thresholds to the one corresponding to
+the current observation level (level_obs).
+#}
+              {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %}
+{#-
+Set the observation threshold. This is given by the element in the list
+of valid observation thresholds that has the same index as that of the
+current forecast threshold (thresh_fcst) in the list of valid forecast
+thresholds.
+#}
+              {%- set indx_thresh_fcst = valid_threshes_fcst.index(thresh_fcst) %}
+              {%- set thresh_obs = valid_threshes_obs[indx_thresh_fcst] %}
+OBS_VAR{{ns.var_count}}_THRESH = {{thresh_obs}}
+            {%- endif %}
+
+{#-
+Set observation field options.
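The forecast variable naming assembled earlier in this loop can be sketched
in Python (a hypothetical helper; only the '&&'/'||' rewriting and the name
layout come from the template above):

def ens_freq_name(field, level, thresh):
    # Fold the physical threshold into the ensemble-frequency field name,
    # rewriting '&&' and '||' the way the template does.
    thresh = thresh.replace('&&', '.and.').replace('||', '.or.')
    return '%s_%s_ENS_FREQ_%s' % (field, level, thresh)

assert ens_freq_name('TMP', 'Z2', 'ge268') == 'TMP_Z2_ENS_FREQ_ge268'
# The FCST_VARn_THRESH entry itself then gets the probabilistic
# threshold (==0.1), not the physical one.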
+#}
+            {%- set opts_indent_len = opts_indent_len - 1 %}
+            {%- set opts_indent = ' '*opts_indent_len %}
+
+            {%- if input_field_group == 'ADPSFC' %}
+
+              {%- if field_obs == 'CEILING' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215;
+{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; }
+              {%- elif field_obs == 'VIS' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
+              {%- endif %}
+
+            {%- elif input_field_group == 'ADPUPA' %}
+
+              {%- if field_obs == 'CAPE' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
+{{opts_indent}}cnt_logic = UNION;
+              {%- elif field_obs == 'PBL' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE";
+              {%- endif %}
+
+            {%- endif %}
+{#-
+Print out a newline to separate the settings for the current field (both
+forecast and observation settings) from those for the next field.
+#}
+            {{- '\n' }}
+
+          {%- endif %}
+        {%- endfor %}
+
+      {%- endif %}
+
+    {%- endfor %}
+  {%- endfor %}
+{%- endfor %}
+#
+# Forecast data description variables
+#
+FCST_IS_PROB = True
+FCST_PROB_IN_GRIB_PDS = False
+
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used in
+# this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Directory in which to write output from {{MetplusToolName}}.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
diff --git a/parm/metplus/PointStat_ensprob_ADPSFC.conf b/parm/metplus/PointStat_ensprob_ADPSFC.conf
deleted file mode 100644
index c9333b2c81..0000000000
--- a/parm/metplus/PointStat_ensprob_ADPSFC.conf
+++ /dev/null
@@ -1,415 +0,0 @@
-# Ensemble probabilistic PointStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -#POINT_STAT_OUTPUT_FLAG_FHO = -#POINT_STAT_OUTPUT_FLAG_CTC = -#POINT_STAT_OUTPUT_FLAG_CTS = -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -#POINT_STAT_OUTPUT_FLAG_CNT = -#POINT_STAT_OUTPUT_FLAG_SL1L2 = -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -#POINT_STAT_OUTPUT_FLAG_VL1L2 = -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -#POINT_STAT_OUTPUT_FLAG_VCNT = -POINT_STAT_OUTPUT_FLAG_PCT = STAT -POINT_STAT_OUTPUT_FLAG_PSTD = STAT -POINT_STAT_OUTPUT_FLAG_PJC = STAT -POINT_STAT_OUTPUT_FLAG_PRC = STAT -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). 
-# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. -POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. -POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. -# Note that the forecast variable name must exactly match the name of a -# variable in the forecast input file(s). 
-# -FCST_VAR1_NAME = TMP_Z2_ENS_FREQ_ge268 -FCST_VAR1_LEVELS = Z2 -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = Z2 -OBS_VAR1_THRESH = ge268 - -FCST_VAR2_NAME = TMP_Z2_ENS_FREQ_ge273 -FCST_VAR2_LEVELS = Z2 -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = TMP -OBS_VAR2_LEVELS = Z2 -OBS_VAR2_THRESH = ge273 - -FCST_VAR3_NAME = TMP_Z2_ENS_FREQ_ge278 -FCST_VAR3_LEVELS = Z2 -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = TMP -OBS_VAR3_LEVELS = Z2 -OBS_VAR3_THRESH = ge278 - -FCST_VAR4_NAME = TMP_Z2_ENS_FREQ_ge293 -FCST_VAR4_LEVELS = Z2 -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = TMP -OBS_VAR4_LEVELS = Z2 -OBS_VAR4_THRESH = ge293 - -FCST_VAR5_NAME = TMP_Z2_ENS_FREQ_ge298 -FCST_VAR5_LEVELS = Z2 -FCST_VAR5_THRESH = ==0.1 -OBS_VAR5_NAME = TMP -OBS_VAR5_LEVELS = Z2 -OBS_VAR5_THRESH = ge298 - -FCST_VAR6_NAME = TMP_Z2_ENS_FREQ_ge303 -FCST_VAR6_LEVELS = Z2 -FCST_VAR6_THRESH = ==0.1 -OBS_VAR6_NAME = TMP -OBS_VAR6_LEVELS = Z2 -OBS_VAR6_THRESH = ge303 - -FCST_VAR7_NAME = DPT_Z2_ENS_FREQ_ge263 -FCST_VAR7_LEVELS = Z2 -FCST_VAR7_THRESH = ==0.1 -OBS_VAR7_NAME = DPT -OBS_VAR7_LEVELS = Z2 -OBS_VAR7_THRESH = ge263 - -FCST_VAR8_NAME = DPT_Z2_ENS_FREQ_ge268 -FCST_VAR8_LEVELS = Z2 -FCST_VAR8_THRESH = ==0.1 -OBS_VAR8_NAME = DPT -OBS_VAR8_LEVELS = Z2 -OBS_VAR8_THRESH = ge268 - -FCST_VAR9_NAME = DPT_Z2_ENS_FREQ_ge273 -FCST_VAR9_LEVELS = Z2 -FCST_VAR9_THRESH = ==0.1 -OBS_VAR9_NAME = DPT -OBS_VAR9_LEVELS = Z2 -OBS_VAR9_THRESH = ge273 - -FCST_VAR10_NAME = DPT_Z2_ENS_FREQ_ge288 -FCST_VAR10_LEVELS = Z2 -FCST_VAR10_THRESH = ==0.1 -OBS_VAR10_NAME = DPT -OBS_VAR10_LEVELS = Z2 -OBS_VAR10_THRESH = ge288 - -FCST_VAR11_NAME = DPT_Z2_ENS_FREQ_ge293 -FCST_VAR11_LEVELS = Z2 -FCST_VAR11_THRESH = ==0.1 -OBS_VAR11_NAME = DPT -OBS_VAR11_LEVELS = Z2 -OBS_VAR11_THRESH = ge293 - -FCST_VAR12_NAME = DPT_Z2_ENS_FREQ_ge298 -FCST_VAR12_LEVELS = Z2 -FCST_VAR12_THRESH = ==0.1 -OBS_VAR12_NAME = DPT -OBS_VAR12_LEVELS = Z2 -OBS_VAR12_THRESH = ge298 - -FCST_VAR13_NAME = WIND_Z10_ENS_FREQ_ge5 -FCST_VAR13_LEVELS = Z10 -FCST_VAR13_THRESH = ==0.1 -OBS_VAR13_NAME = WIND -OBS_VAR13_LEVELS = Z10 -OBS_VAR13_THRESH = ge5 - -FCST_VAR14_NAME = WIND_Z10_ENS_FREQ_ge10 -FCST_VAR14_LEVELS = Z10 -FCST_VAR14_THRESH = ==0.1 -OBS_VAR14_NAME = WIND -OBS_VAR14_LEVELS = Z10 -OBS_VAR14_THRESH = ge10 - -FCST_VAR15_NAME = WIND_Z10_ENS_FREQ_ge15 -FCST_VAR15_LEVELS = Z10 -FCST_VAR15_THRESH = ==0.1 -OBS_VAR15_NAME = WIND -OBS_VAR15_LEVELS = Z10 -OBS_VAR15_THRESH = ge15 - -FCST_VAR16_NAME = TCDC_L0_ENS_FREQ_lt25 -FCST_VAR16_LEVELS = L0 -FCST_VAR16_THRESH = ==0.1 -OBS_VAR16_NAME = TCDC -OBS_VAR16_LEVELS = L0 -OBS_VAR16_THRESH = lt25 - -FCST_VAR17_NAME = TCDC_L0_ENS_FREQ_gt75 -FCST_VAR17_LEVELS = L0 -FCST_VAR17_THRESH = ==0.1 -OBS_VAR17_NAME = TCDC -OBS_VAR17_LEVELS = L0 -OBS_VAR17_THRESH = gt75 - -FCST_VAR18_NAME = VIS_L0_ENS_FREQ_lt1609 -FCST_VAR18_LEVELS = L0 -FCST_VAR18_THRESH = ==0.1 -FCST_VAR18_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR18_NAME = VIS -OBS_VAR18_LEVELS = L0 -OBS_VAR18_THRESH = lt1609 -OBS_VAR18_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR19_NAME = VIS_L0_ENS_FREQ_lt8045 -FCST_VAR19_LEVELS = L0 -FCST_VAR19_THRESH = ==0.1 -FCST_VAR19_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR19_NAME = VIS -OBS_VAR19_LEVELS = L0 -OBS_VAR19_THRESH = lt8045 -OBS_VAR19_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR20_NAME = VIS_L0_ENS_FREQ_ge8045 -FCST_VAR20_LEVELS = L0 -FCST_VAR20_THRESH = ==0.1 -FCST_VAR20_OPTIONS = interp = { type = [ { 
method = NEAREST; width = 1; } ]; } -OBS_VAR20_NAME = VIS -OBS_VAR20_LEVELS = L0 -OBS_VAR20_THRESH = ge8045 -OBS_VAR20_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR21_NAME = HGT_L0_ENS_FREQ_lt152 -FCST_VAR21_LEVELS = L0 -FCST_VAR21_THRESH = ==0.1 -FCST_VAR21_OPTIONS = desc = "CEILING"; -OBS_VAR21_NAME = CEILING -OBS_VAR21_LEVELS = L0 -OBS_VAR21_THRESH = lt152 -OBS_VAR21_OPTIONS = GRIB_lvl_typ = 215; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR22_NAME = HGT_L0_ENS_FREQ_lt1520 -FCST_VAR22_LEVELS = L0 -FCST_VAR22_THRESH = ==0.1 -FCST_VAR22_OPTIONS = desc = "CEILING"; -OBS_VAR22_NAME = CEILING -OBS_VAR22_LEVELS = L0 -OBS_VAR22_THRESH = lt1520 -OBS_VAR22_OPTIONS = GRIB_lvl_typ = 215; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR23_NAME = HGT_L0_ENS_FREQ_ge914 -FCST_VAR23_LEVELS = L0 -FCST_VAR23_THRESH = ==0.1 -FCST_VAR23_OPTIONS = desc = "CEILING"; -OBS_VAR23_NAME = CEILING -OBS_VAR23_LEVELS = L0 -OBS_VAR23_THRESH = ge914 -OBS_VAR23_OPTIONS = GRIB_lvl_typ = 215; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -# -# Forecast data description variables -# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. -# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. -# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensprob_ADPUPA.conf b/parm/metplus/PointStat_ensprob_ADPUPA.conf deleted file mode 100644 index eab0270c69..0000000000 --- a/parm/metplus/PointStat_ensprob_ADPUPA.conf +++ /dev/null @@ -1,523 +0,0 @@ -# Ensemble probabilistic PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. 
-PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -#POINT_STAT_OUTPUT_FLAG_FHO = -#POINT_STAT_OUTPUT_FLAG_CTC = -#POINT_STAT_OUTPUT_FLAG_CTS = -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -#POINT_STAT_OUTPUT_FLAG_CNT = -#POINT_STAT_OUTPUT_FLAG_SL1L2 = -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -#POINT_STAT_OUTPUT_FLAG_VL1L2 = -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -#POINT_STAT_OUTPUT_FLAG_VCNT = -POINT_STAT_OUTPUT_FLAG_PCT = STAT -POINT_STAT_OUTPUT_FLAG_PSTD = STAT -POINT_STAT_OUTPUT_FLAG_PJC = STAT -POINT_STAT_OUTPUT_FLAG_PRC = STAT -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). 
-# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. -POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. -POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. -# Note that the forecast variable name must exactly match the name of a -# variable in the forecast input file(s). 
-# -FCST_VAR1_NAME = TMP_P850_ENS_FREQ_ge288 -FCST_VAR1_LEVELS = P850 -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = P850 -OBS_VAR1_THRESH = ge288 - -FCST_VAR2_NAME = TMP_P850_ENS_FREQ_ge293 -FCST_VAR2_LEVELS = P850 -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = TMP -OBS_VAR2_LEVELS = P850 -OBS_VAR2_THRESH = ge293 - -FCST_VAR3_NAME = TMP_P850_ENS_FREQ_ge298 -FCST_VAR3_LEVELS = P850 -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = TMP -OBS_VAR3_LEVELS = P850 -OBS_VAR3_THRESH = ge298 - -FCST_VAR4_NAME = TMP_P700_ENS_FREQ_ge273 -FCST_VAR4_LEVELS = P700 -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = TMP -OBS_VAR4_LEVELS = P700 -OBS_VAR4_THRESH = ge273 - -FCST_VAR5_NAME = TMP_P700_ENS_FREQ_ge278 -FCST_VAR5_LEVELS = P700 -FCST_VAR5_THRESH = ==0.1 -OBS_VAR5_NAME = TMP -OBS_VAR5_LEVELS = P700 -OBS_VAR5_THRESH = ge278 - -FCST_VAR6_NAME = TMP_P700_ENS_FREQ_ge283 -FCST_VAR6_LEVELS = P700 -FCST_VAR6_THRESH = ==0.1 -OBS_VAR6_NAME = TMP -OBS_VAR6_LEVELS = P700 -OBS_VAR6_THRESH = ge283 - -FCST_VAR7_NAME = TMP_P500_ENS_FREQ_ge258 -FCST_VAR7_LEVELS = P500 -FCST_VAR7_THRESH = ==0.1 -OBS_VAR7_NAME = TMP -OBS_VAR7_LEVELS = P500 -OBS_VAR7_THRESH = ge258 - -FCST_VAR8_NAME = TMP_P500_ENS_FREQ_ge263 -FCST_VAR8_LEVELS = P500 -FCST_VAR8_THRESH = ==0.1 -OBS_VAR8_NAME = TMP -OBS_VAR8_LEVELS = P500 -OBS_VAR8_THRESH = ge263 - -FCST_VAR9_NAME = TMP_P500_ENS_FREQ_ge268 -FCST_VAR9_LEVELS = P500 -FCST_VAR9_THRESH = ==0.1 -OBS_VAR9_NAME = TMP -OBS_VAR9_LEVELS = P500 -OBS_VAR9_THRESH = ge268 - -FCST_VAR10_NAME = DPT_P850_ENS_FREQ_ge273 -FCST_VAR10_LEVELS = P850 -FCST_VAR10_THRESH = ==0.1 -OBS_VAR10_NAME = DPT -OBS_VAR10_LEVELS = P850 -OBS_VAR10_THRESH = ge273 - -FCST_VAR11_NAME = DPT_P850_ENS_FREQ_ge278 -FCST_VAR11_LEVELS = P850 -FCST_VAR11_THRESH = ==0.1 -OBS_VAR11_NAME = DPT -OBS_VAR11_LEVELS = P850 -OBS_VAR11_THRESH = ge278 - -FCST_VAR12_NAME = DPT_P850_ENS_FREQ_ge283 -FCST_VAR12_LEVELS = P850 -FCST_VAR12_THRESH = ==0.1 -OBS_VAR12_NAME = DPT -OBS_VAR12_LEVELS = P850 -OBS_VAR12_THRESH = ge283 - -FCST_VAR13_NAME = DPT_P700_ENS_FREQ_ge263 -FCST_VAR13_LEVELS = P700 -FCST_VAR13_THRESH = ==0.1 -OBS_VAR13_NAME = DPT -OBS_VAR13_LEVELS = P700 -OBS_VAR13_THRESH = ge263 - -FCST_VAR14_NAME = DPT_P700_ENS_FREQ_ge268 -FCST_VAR14_LEVELS = P700 -FCST_VAR14_THRESH = ==0.1 -OBS_VAR14_NAME = DPT -OBS_VAR14_LEVELS = P700 -OBS_VAR14_THRESH = ge268 - -FCST_VAR15_NAME = DPT_P700_ENS_FREQ_ge273 -FCST_VAR15_LEVELS = P700 -FCST_VAR15_THRESH = ==0.1 -OBS_VAR15_NAME = DPT -OBS_VAR15_LEVELS = P700 -OBS_VAR15_THRESH = ge273 - -FCST_VAR16_NAME = WIND_P850_ENS_FREQ_ge5 -FCST_VAR16_LEVELS = P850 -FCST_VAR16_THRESH = ==0.1 -OBS_VAR16_NAME = WIND -OBS_VAR16_LEVELS = P850 -OBS_VAR16_THRESH = ge5 - -FCST_VAR17_NAME = WIND_P850_ENS_FREQ_ge10 -FCST_VAR17_LEVELS = P850 -FCST_VAR17_THRESH = ==0.1 -OBS_VAR17_NAME = WIND -OBS_VAR17_LEVELS = P850 -OBS_VAR17_THRESH = ge10 - -FCST_VAR18_NAME = WIND_P850_ENS_FREQ_ge15 -FCST_VAR18_LEVELS = P850 -FCST_VAR18_THRESH = ==0.1 -OBS_VAR18_NAME = WIND -OBS_VAR18_LEVELS = P850 -OBS_VAR18_THRESH = ge15 - -FCST_VAR19_NAME = WIND_P700_ENS_FREQ_ge10 -FCST_VAR19_LEVELS = P700 -FCST_VAR19_THRESH = ==0.1 -OBS_VAR19_NAME = WIND -OBS_VAR19_LEVELS = P700 -OBS_VAR19_THRESH = ge10 - -FCST_VAR20_NAME = WIND_P700_ENS_FREQ_ge15 -FCST_VAR20_LEVELS = P700 -FCST_VAR20_THRESH = ==0.1 -OBS_VAR20_NAME = WIND -OBS_VAR20_LEVELS = P700 -OBS_VAR20_THRESH = ge15 - -FCST_VAR21_NAME = WIND_P700_ENS_FREQ_ge20 -FCST_VAR21_LEVELS = P700 -FCST_VAR21_THRESH = ==0.1 -OBS_VAR21_NAME = WIND -OBS_VAR21_LEVELS = P700 -OBS_VAR21_THRESH = ge20 - -FCST_VAR22_NAME 
= WIND_P500_ENS_FREQ_ge15 -FCST_VAR22_LEVELS = P500 -FCST_VAR22_THRESH = ==0.1 -OBS_VAR22_NAME = WIND -OBS_VAR22_LEVELS = P500 -OBS_VAR22_THRESH = ge15 - -FCST_VAR23_NAME = WIND_P500_ENS_FREQ_ge21 -FCST_VAR23_LEVELS = P500 -FCST_VAR23_THRESH = ==0.1 -OBS_VAR23_NAME = WIND -OBS_VAR23_LEVELS = P500 -OBS_VAR23_THRESH = ge21 - -FCST_VAR24_NAME = WIND_P500_ENS_FREQ_ge26 -FCST_VAR24_LEVELS = P500 -FCST_VAR24_THRESH = ==0.1 -OBS_VAR24_NAME = WIND -OBS_VAR24_LEVELS = P500 -OBS_VAR24_THRESH = ge26 - -FCST_VAR25_NAME = WIND_P250_ENS_FREQ_ge26 -FCST_VAR25_LEVELS = P250 -FCST_VAR25_THRESH = ==0.1 -OBS_VAR25_NAME = WIND -OBS_VAR25_LEVELS = P250 -OBS_VAR25_THRESH = ge26 - -FCST_VAR26_NAME = WIND_P250_ENS_FREQ_ge31 -FCST_VAR26_LEVELS = P250 -FCST_VAR26_THRESH = ==0.1 -OBS_VAR26_NAME = WIND -OBS_VAR26_LEVELS = P250 -OBS_VAR26_THRESH = ge31 - -FCST_VAR27_NAME = WIND_P250_ENS_FREQ_ge36 -FCST_VAR27_LEVELS = P250 -FCST_VAR27_THRESH = ==0.1 -OBS_VAR27_NAME = WIND -OBS_VAR27_LEVELS = P250 -OBS_VAR27_THRESH = ge36 - -FCST_VAR28_NAME = WIND_P250_ENS_FREQ_ge46 -FCST_VAR28_LEVELS = P250 -FCST_VAR28_THRESH = ==0.1 -OBS_VAR28_NAME = WIND -OBS_VAR28_LEVELS = P250 -OBS_VAR28_THRESH = ge46 - -FCST_VAR29_NAME = WIND_P250_ENS_FREQ_ge62 -FCST_VAR29_LEVELS = P250 -FCST_VAR29_THRESH = ==0.1 -OBS_VAR29_NAME = WIND -OBS_VAR29_LEVELS = P250 -OBS_VAR29_THRESH = ge62 - -FCST_VAR30_NAME = HGT_P500_ENS_FREQ_ge5400 -FCST_VAR30_LEVELS = P500 -FCST_VAR30_THRESH = ==0.1 -OBS_VAR30_NAME = HGT -OBS_VAR30_LEVELS = P500 -OBS_VAR30_THRESH = ge5400 - -FCST_VAR31_NAME = HGT_P500_ENS_FREQ_ge5600 -FCST_VAR31_LEVELS = P500 -FCST_VAR31_THRESH = ==0.1 -OBS_VAR31_NAME = HGT -OBS_VAR31_LEVELS = P500 -OBS_VAR31_THRESH = ge5600 - -FCST_VAR32_NAME = HGT_P500_ENS_FREQ_ge5880 -FCST_VAR32_LEVELS = P500 -FCST_VAR32_THRESH = ==0.1 -OBS_VAR32_NAME = HGT -OBS_VAR32_LEVELS = P500 -OBS_VAR32_THRESH = ge5880 - -FCST_VAR33_NAME = CAPE_L0_ENS_FREQ_le1000 -FCST_VAR33_LEVELS = L0 -FCST_VAR33_THRESH = ==0.1 -OBS_VAR33_NAME = CAPE -OBS_VAR33_LEVELS = L0-100000 -OBS_VAR33_THRESH = le1000 -OBS_VAR33_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR34_NAME = CAPE_L0_ENS_FREQ_gt1000.and.lt2500 -FCST_VAR34_LEVELS = L0 -FCST_VAR34_THRESH = ==0.1 -OBS_VAR34_NAME = CAPE -OBS_VAR34_LEVELS = L0-100000 -OBS_VAR34_THRESH = gt1000&<2500 -OBS_VAR34_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR35_NAME = CAPE_L0_ENS_FREQ_gt2500.and.lt4000 -FCST_VAR35_LEVELS = L0 -FCST_VAR35_THRESH = ==0.1 -OBS_VAR35_NAME = CAPE -OBS_VAR35_LEVELS = L0-100000 -OBS_VAR35_THRESH = gt2500&<4000 -OBS_VAR35_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR36_NAME = CAPE_L0_ENS_FREQ_gt2500 -FCST_VAR36_LEVELS = L0 -FCST_VAR36_THRESH = ==0.1 -OBS_VAR36_NAME = CAPE -OBS_VAR36_LEVELS = L0-100000 -OBS_VAR36_THRESH = gt2500 -OBS_VAR36_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR37_NAME = HPBL_Z0_ENS_FREQ_lt500 -FCST_VAR37_LEVELS = Z0 -FCST_VAR37_THRESH = ==0.1 -OBS_VAR37_NAME = PBL -OBS_VAR37_LEVELS = L0 -OBS_VAR37_THRESH = lt500 -OBS_VAR37_OPTIONS = desc = "TKE"; - -FCST_VAR38_NAME = HPBL_Z0_ENS_FREQ_lt1500 -FCST_VAR38_LEVELS = Z0 -FCST_VAR38_THRESH = ==0.1 -OBS_VAR38_NAME = PBL -OBS_VAR38_LEVELS = L0 -OBS_VAR38_THRESH = lt1500 -OBS_VAR38_OPTIONS = desc = "TKE"; - -FCST_VAR39_NAME = HPBL_Z0_ENS_FREQ_gt1500 -FCST_VAR39_LEVELS = Z0 -FCST_VAR39_THRESH = ==0.1 -OBS_VAR39_NAME = PBL -OBS_VAR39_LEVELS = L0 -OBS_VAR39_THRESH = gt1500 -OBS_VAR39_OPTIONS = desc = "TKE"; - -# -# Forecast data description variables -# -FCST_IS_PROB = True 
-FCST_PROB_IN_GRIB_PDS = False - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. -# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. -# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/metplus_macros.jinja b/parm/metplus/metplus_macros.jinja new file mode 100644 index 0000000000..94ac5d9485 --- /dev/null +++ b/parm/metplus/metplus_macros.jinja @@ -0,0 +1,150 @@ +{#- +This macro prints out an error message and quits the jinja templater. +#} +{%- macro print_err_and_quit(error_msg) %} + {%- include 'ERROR: ' ~ error_msg %} +{%- endmacro %} +{#- +Given a specified field level that is really an accumulation period, this +macro prints out an "A" followed by the accumulation period (an integer) +with any leading zeros removed. For example, if the level is 'A03', it +prints out 'A3'. +#} +{%- macro get_accumulation_no_zero_pad(level) %} + {%- set first_char = level[0] %} + {%- set the_rest = level[1:] %} + {%- if (first_char == 'A') %} + {{- first_char ~ '%d'%the_rest|int }} + {%- else %} + {{- level }} + {%- endif %} +{%- endmacro %} +{#- +This macro checks whether the specified level (input_level) has a valid +value. input_level may be set to 'all' or to a specific level. If set +to 'all', input_level is not checked because in this case, whatever valid/ +available levels are found will be included in the METplus configuration +file for all specified fields. input_level IS checked if it is set to +any other value because in this case, all the specified fields will use +only that specific level in the METplus configuration file, which implies +that the level must be valid for all such fields. 
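One implementation note on the print_err_and_quit macro defined at the top of
this file: Jinja has no built-in abort, so the macro halts rendering by
including a template whose *name* is the error text; the include fails with
TemplateNotFound, and the message surfaces in the traceback. A minimal
demonstration, assuming the jinja2 Python package:

import jinja2

env = jinja2.Environment(loader=jinja2.DictLoader({}))
tmpl = env.from_string("{% include 'ERROR: bad vx config' %}")
try:
    tmpl.render()
except jinja2.TemplateNotFound as exc:
    print(exc)   # -> ERROR: bad vx config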
+#}
+{%- macro check_level(fields_levels_threshes, input_level) %}
+
+  {%- if input_level != 'all' %}
+
+    {%- set num_valid_fields = fields_levels_threshes|length %}
+    {%- set valid_fields = [] %}
+    {%- for i in range(0,num_valid_fields) %}
+      {%- set field = fields_levels_threshes[i].keys()|list|join('') %}
+      {%- set tmp = valid_fields.append(field) %}
+    {%- endfor %}
+
+    {%- for i in range(0,num_valid_fields) %}
+      {%- set field = valid_fields[i] %}
+      {%- set valid_levels = fields_levels_threshes[i][field].keys()|list %}
+      {%- if input_level not in valid_levels %}
+        {%- set error_msg = '\n' ~
+'The specified level (input_level) is not in the list of valid levels\n' ~
+'(valid_levels) for the current field (field):\n' ~
+'  field = \'' ~ field ~ '\'\n' ~
+'  valid_levels = ' ~ valid_levels ~ '\n' ~
+'  input_level = \'' ~ input_level ~ '\'\n' ~
+'input_level must either be set to the string \'all\' (to include all valid\n' ~
+'values in the verification) or to one of the elements in valid_levels.' %}
+        {{print_err_and_quit(error_msg)}}
+      {%- endif %}
+    {%- endfor %}
+
+  {%- endif %}
+
+{%- endmacro %}
+{#-
+This macro checks whether the specified threshold (input_thresh) has a
+valid value. input_thresh may be set to 'none', 'all', or a specific
+threshold. If set to 'none', input_thresh is not checked for a valid
+value since threshold information will not be included in the METplus
+configuration file. input_thresh is also not checked for a valid value
+if it is set to 'all' because in this case, whatever valid/available
+thresholds are found will be included in the METplus configuration file
+for all specified field and level combinations. Finally, input_thresh
+IS checked for a valid value if it is set to something other than 'none'
+and 'all' because in this case, all specified field and level combinations
+(where the latter, depending on the value of input_level, may be either all
+valid/available levels or a single one) will use only that specific
+threshold in the METplus configuration file, which implies that the
+threshold must be valid for all such field and level combinations.
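In Python terms, the check just described amounts to something like the
following sketch (illustrative only; the authoritative logic is the Jinja
macro that follows):

def check_thresh(fields_levels_threshes, input_level, input_thresh):
    if input_thresh in ('none', 'all'):
        return                      # nothing to validate in these modes
    for entry in fields_levels_threshes:
        (field,) = entry.keys()
        for level, valid_threshes in entry[field].items():
            if input_level in ('all', level) and input_thresh not in valid_threshes:
                raise ValueError(
                    'threshold %r not valid for field %r at level %r; '
                    'valid thresholds: %s'
                    % (input_thresh, field, level, valid_threshes))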
+#}
+{%- macro check_thresh(fields_levels_threshes, input_level, input_thresh) %}
+
+  {%- if (input_thresh != 'none') and (input_thresh != 'all') %}
+
+    {%- set num_valid_fields = fields_levels_threshes|length %}
+    {%- set valid_fields = [] %}
+    {%- for i in range(0,num_valid_fields) %}
+      {%- set field = fields_levels_threshes[i].keys()|list|join('') %}
+      {%- set tmp = valid_fields.append(field) %}
+    {%- endfor %}
+
+    {%- for i in range(0,num_valid_fields) %}
+      {%- set field = valid_fields[i] %}
+      {%- set valid_levels = fields_levels_threshes[i][field].keys()|list %}
+      {%- set valid_levels_threshes = fields_levels_threshes[i][field] %}
+
+      {%- for level, valid_threshes in valid_levels_threshes.items() %}
+        {%- if (input_level == 'all') or (input_level == level) %}
+          {%- if input_thresh not in valid_threshes %}
+            {%- set error_msg = '\n' ~
+'The specified threshold (input_thresh) is not in the list of valid\n' ~
+'thresholds (valid_threshes) for the current field (field) and level\n' ~
+'(level) combination:\n' ~
+'  field = \'' ~ field ~ '\'\n' ~
+'  level = \'' ~ level ~ '\'\n' ~
+'  valid_threshes = ' ~ valid_threshes ~ '\n' ~
+'  input_thresh = \'' ~ input_thresh ~ '\'\n' ~
+'input_thresh must be set to the string \'all\' (to include in the METplus\n' ~
+'configuration file all thresholds for each valid combination of field and\n' ~
+'level), to the string \'none\' (to include no threshold information in the\n' ~
+'METplus configuration file), or to one of the elements in valid_threshes\n' ~
+'(to include only that specific threshold in the METplus configuration file).' %}
+            {{print_err_and_quit(error_msg)}}
+          {%- endif %}
+        {%- endif %}
+
+      {%- endfor %}
+
+    {%- endfor %}
+
+  {%- endif %}
+
+{%- endmacro %}
+{#-
+This macro checks whether, for the given field, the lists of thresholds
+for all levels are identical. If not, it prints out an error message
+and errors out.
+#}
+{%- macro check_for_identical_threshes_by_level(field, levels_threshes) %}
+  {%- set avail_levels = levels_threshes[field].keys()|list %}
+  {%- set num_avail_levels = avail_levels|length %}
+  {%- set threshes_by_avail_level = levels_threshes[field].values()|list %}
+  {%- for i in range(1,num_avail_levels) %}
+    {%- set level = avail_levels[i-1] %}
+    {%- set threshes = threshes_by_avail_level[i-1] %}
+    {%- set level_next = avail_levels[i] %}
+    {%- set threshes_next = threshes_by_avail_level[i] %}
+    {%- if (threshes_next != threshes) %}
+      {%- set error_msg = '\n\n' ~
+'For the given field (field), the set of thresholds for the next level\n' ~
+'(threshes_next, level_next) is not equal to that of the current level\n' ~
+'(threshes, level) (note that order of thresholds matters here):\n' ~
+'  field = \'' ~ field ~ '\'\n' ~
+'  num_avail_levels = ' ~ num_avail_levels ~ '\n' ~
+'  level = \'' ~ level ~ '\'\n' ~
+'  threshes = ' ~ threshes ~ '\n' ~
+'  level_next = \'' ~ level_next ~ '\'\n' ~
+'  threshes_next = ' ~ threshes_next ~ '\n'
+       %}
+      {{print_err_and_quit(error_msg)}}
+    {%- endif %}
+  {%- endfor %}
+{%- endmacro %}
diff --git a/parm/metplus/vx_config_det.yaml b/parm/metplus/vx_config_det.yaml
new file mode 100644
index 0000000000..4c721176c6
--- /dev/null
+++ b/parm/metplus/vx_config_det.yaml
@@ -0,0 +1,204 @@
+#
+# This configuration file specifies the field groups, fields, levels,
+# and thresholds to use for DETERMINISTIC verification. The format is
+# as follows:
+#
+# FIELD_GROUP1:
+#   FIELD1:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   FIELD2:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   ...
+#
+# FIELD_GROUP2:
+#   FIELD1:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   FIELD2:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   ...
+#
+# ...
+#
+# If the threshold list for a given combination of field group, field,
+# and level is set to the empty list ([]), then all values of that
+# field will be included in the verification.
+#
+# Both the keys that represent field groups, fields, and levels and the
+# strings in the list of thresholds may contain the separator string "%%"
+# that separates the value of the quantity for the forecast from that for
+# the observations. For example, if a field is set to
+#
+#   RETOP%%EchoTop18
+#
+# it means the name of the field in the forecast data is RETOP while its
+# name in the observations is EchoTop18.
+#
+APCP:
+  APCP:
+    A1: ['gt0.0', 'ge0.254', 'ge0.508', 'ge1.27', 'ge2.54']
+    A3: ['gt0.0', 'ge0.254', 'ge0.508', 'ge1.27', 'ge2.54', 'ge3.810', 'ge6.350']
+    A6: ['gt0.0', 'ge0.254', 'ge0.508', 'ge1.27', 'ge2.54', 'ge3.810', 'ge6.350', 'ge8.890', 'ge12.700']
+    A24: ['gt0.0', 'ge0.254', 'ge0.508', 'ge1.27', 'ge2.54', 'ge3.810', 'ge6.350', 'ge8.890', 'ge12.700', 'ge25.400']
+ASNOW:
+  ASNOW:
+    A6: ['gt0.0', 'ge2.54', 'ge5.08', 'ge10.16', 'ge20.32']
+REFC:
+  REFC%%MergedReflectivityQCComposite:
+    L0%%Z500: ['ge20', 'ge30', 'ge40', 'ge50']
+RETOP:
+  RETOP%%EchoTop18:
+    L0%%Z500: ['ge20', 'ge30', 'ge40', 'ge50']
+ADPSFC:
+  TMP:
+    Z2: []
+  DPT:
+    Z2: []
+  RH:
+    Z2: []
+  UGRD:
+    Z10: ['ge2.572']
+  VGRD:
+    Z10: ['ge2.572']
+  WIND:
+    Z10: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433']
+  PRMSL:
+    Z0: []
+  TCDC:
+    L0: []
+  VIS:
+    L0: ['lt805', 'lt1609', 'lt4828', 'lt8045', 'ge8045', 'lt16090']
+  GUST:
+    Z0: []
+  HGT%%CEILING:
+    L0: ['lt152', 'lt305', 'lt914', 'lt1520', 'lt3040', 'ge914']
+  SPFH:
+    Z2: []
+  CRAIN%%PRWE:
+    L0%%Z0: ['ge1.0%%ge161&&le163']
+  CSNOW%%PRWE:
+    L0%%Z0: ['ge1.0%%ge171&&le173']
+  CFRZR%%PRWE:
+    L0%%Z0: ['ge1.0%%ge164&&le166']
+  CICEP%%PRWE:
+    L0%%Z0: ['ge1.0%%ge174&&le176']
+ADPUPA:
+  TMP:
+    P1000: []
+    P925: []
+    P850: []
+    P700: []
+    P500: []
+    P400: []
+    P300: []
+    P250: []
+    P200: []
+    P150: []
+    P100: []
+    P50: []
+    P20: []
+    P10: []
+  RH:
+    P1000: []
+    P925: []
+    P850: []
+    P700: []
+    P500: []
+    P400: []
+    P300: []
+    P250: []
+  DPT:
+    P1000: []
+    P925: []
+    P850: []
+    P700: []
+    P500: []
+    P400: []
+    P300: []
+  UGRD:
+    P1000: ['ge2.572']
+    P925: ['ge2.572']
+    P850: ['ge2.572']
+    P700: ['ge2.572']
+    P500: ['ge2.572']
+    P400: ['ge2.572']
+    P300: ['ge2.572']
+    P250: ['ge2.572']
+    P200: ['ge2.572']
+    P150: ['ge2.572']
+    P100: ['ge2.572']
+    P50: ['ge2.572']
+    P20: ['ge2.572']
+    P10: ['ge2.572']
+  VGRD:
+    P1000: ['ge2.572']
+    P925: ['ge2.572']
+    P850: ['ge2.572']
+    P700: ['ge2.572']
+    P500: ['ge2.572']
+    P400: ['ge2.572']
+    P300: ['ge2.572']
+    P250: ['ge2.572']
+    P200: ['ge2.572']
+    P150: ['ge2.572']
+    P100: ['ge2.572']
+    P50: ['ge2.572']
+    P20: ['ge2.572']
+    P10: ['ge2.572']
+  WIND:
+    P1000: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+    P925: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+    P850: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+    P700: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+    P500: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722']
+    P400: ['ge2.572',
'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] + P300: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] + P250: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] + P200: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] + P150: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] + P100: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] + P50: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] + P20: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] + P10: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] + HGT: + P1000: [] + P950: [] + P925: [] + P850: [] + P700: [] + P500: [] + P400: [] + P300: [] + P250: [] + P200: [] + P150: [] + P100: [] + P50: [] + P20: [] + P10: [] + SPFH: + P1000: [] + P850: [] + P700: [] + P500: [] + P400: [] + P300: [] + CAPE: + L0%%L0-100000: + ['gt500', 'gt1000', 'gt1500', 'gt2000', 'gt3000', 'gt4000'] + HPBL%%PBL: + Z0%%L0: + [] + HGT%%PBL: + L0: + [] + CAPE%%MLCAPE: + L0-90%%L0: + ['gt500', 'gt1000', 'gt1500', 'gt2000', 'gt3000', 'gt4000'] diff --git a/parm/metplus/vx_config_ens.yaml b/parm/metplus/vx_config_ens.yaml new file mode 100644 index 0000000000..5f55254a4c --- /dev/null +++ b/parm/metplus/vx_config_ens.yaml @@ -0,0 +1,54 @@ +# +# This configuration file specifies the field groups, fields, levels, +# and thresholds to use for ENSEMBLE verification. The format is the +# same as the one used in the configuration file for deterministic +# verification (vx_config_det.yaml); please see the documentation in +# that file for details. 
+# +APCP: + APCP: + A1: ['gt0.0', 'ge0.254', 'ge0.508', 'ge2.54'] + A3: ['gt0.0', 'ge0.508', 'ge2.54', 'ge6.350'] + A6: ['gt0.0', 'ge2.54', 'ge6.350', 'ge12.700'] + A24: ['gt0.0', 'ge6.350', 'ge12.700', 'ge25.400'] +ASNOW: + ASNOW: + A6: ['gt0.0', 'ge2.54', 'ge5.08', 'ge10.16', 'ge20.32'] +REFC: + REFC%%MergedReflectivityQCComposite: + L0%%Z500: ['ge20', 'ge30', 'ge40', 'ge50'] +RETOP: + RETOP%%EchoTop18: + L0%%Z500: ['ge20', 'ge30', 'ge40', 'ge50'] +ADPSFC: + TMP: + Z2: ['ge268', 'ge273', 'ge278', 'ge293', 'ge298', 'ge303'] + DPT: + Z2: ['ge263', 'ge268', 'ge273', 'ge288', 'ge293', 'ge298'] + WIND: + Z10: ['ge5', 'ge10', 'ge15'] + TCDC: + L0: ['lt25', 'gt75'] + VIS: + L0: ['lt1609', 'lt8045', 'ge8045'] + HGT%%CEILING: + L0: ['lt152', 'lt305', 'lt914'] +ADPUPA: + TMP: + P850: ['ge288', 'ge293', 'ge298'] + P700: ['ge273', 'ge278', 'ge283'] + P500: ['ge258', 'ge263', 'ge268'] + DPT: + P850: ['ge273', 'ge278', 'ge283'] + P700: ['ge263', 'ge268', 'ge273'] + WIND: + P850: ['ge5', 'ge10', 'ge15'] + P700: ['ge10', 'ge15', 'ge20'] + P500: ['ge15', 'ge21', 'ge26'] + P250: ['ge26', 'ge31', 'ge36', 'ge46', 'ge62'] + HGT: + P500: ['ge5400', 'ge5600', 'ge5880'] + CAPE: + L0%%L0-100000: ['le1000', 'gt1000&<2500', 'ge2500&<4000', 'ge2500'] + HPBL%%PBL: + Z0%%L0: ['lt500', 'lt1500', 'gt1500'] diff --git a/parm/wflow/verify_det.yaml b/parm/wflow/verify_det.yaml index 4c6b43ca25..79f04eeaaa 100644 --- a/parm/wflow/verify_det.yaml +++ b/parm/wflow/verify_det.yaml @@ -21,6 +21,18 @@ default_task_verify_det: &default_task_verify_det queue: '&QUEUE_DEFAULT;' walltime: 00:30:00 +task_parse_vx_config_det: + <<: *default_task_verify_det + command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_PARSE_VX_CONFIG"' + envars: + <<: *default_vars + DET_OR_ENS: 'det' + join: !cycstr '&LOGDIR;/{{ jobname }}&LOGEXT;' + walltime: 00:05:00 + # No dependencies are needed for this task because as long as any deterministic + # verification tasks are going to be run (i.e. as long as this configuration + # file is included in the workflow), then this task must be launched. 
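
For a concrete sense of the "coupled" notation that the task_parse_vx_config_det
task above decouples, here is a minimal Python sketch (illustrative only, not
part of the patch) that splits an entry such as RETOP%%EchoTop18 from the
vx_config files above on the '%%' delimiter documented in their headers:

    # Split a coupled "FCST%%OBS" item into its forecast and observation
    # halves; an uncoupled item maps to itself.  (The real script errors
    # out if the delimiter appears more than once.)
    DELIM = "%%"

    def split_coupled(item, delim=DELIM):
        if delim in item:
            fcst, obs = item.split(delim, 1)
        else:
            fcst, obs = item, item
        return fcst, obs

    assert split_coupled("RETOP%%EchoTop18") == ("RETOP", "EchoTop18")
    assert split_coupled("TMP") == ("TMP", "TMP")
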
+ metatask_GridStat_CCPA_all_accums_all_mems: var: ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' @@ -41,6 +53,8 @@ metatask_GridStat_CCPA_all_accums_all_mems: OBTYPE: 'CCPA' ENSMEM_INDX: "#mem#" SLASH_ENSMEM_SUBDIR_OR_NULL: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% endif %}' + FCST_LEVEL: 'A#ACCUM_HH#' + FCST_THRESH: 'all' walltime: 02:00:00 dependency: and: @@ -50,6 +64,9 @@ metatask_GridStat_CCPA_all_accums_all_mems: taskdep_pcpcombine_fcst: attrs: task: run_MET_PcpCombine_fcst_APCP#ACCUM_HH#h_mem#mem# + taskdep_parse_vx_config_det: + attrs: + task: parse_vx_config_det metatask_GridStat_NOHRSC_all_accums_all_mems: var: @@ -71,15 +88,20 @@ metatask_GridStat_NOHRSC_all_accums_all_mems: OBTYPE: 'NOHRSC' ENSMEM_INDX: "#mem#" SLASH_ENSMEM_SUBDIR_OR_NULL: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% endif %}' + FCST_LEVEL: 'A#ACCUM_HH#' + FCST_THRESH: 'all' walltime: 02:00:00 dependency: and: - taskdep: + taskdep_get_obs_nohrsc: attrs: - task: get_obs_mrms + task: get_obs_nohrsc taskdep_pcpcombine_fcst: attrs: task: run_MET_PcpCombine_fcst_ASNOW#ACCUM_HH#h_mem#mem# + taskdep_parse_vx_config_det: + attrs: + task: parse_vx_config_det metatask_GridStat_MRMS_all_mems: var: @@ -99,16 +121,21 @@ metatask_GridStat_MRMS_all_mems: OBTYPE: 'MRMS' ENSMEM_INDX: "#mem#" SLASH_ENSMEM_SUBDIR_OR_NULL: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% endif %}' + FCST_LEVEL: 'L0' + FCST_THRESH: 'all' walltime: 02:00:00 dependency: and: - taskdep: + taskdep_get_obs_mrms: attrs: task: get_obs_mrms datadep_post_files_exist: attrs: age: 00:00:00:30 text: !cycstr '{{ workflow.EXPTDIR }}/@Y@m@d@H/post_files_exist_mem#mem#.txt' + taskdep_parse_vx_config_det: + attrs: + task: parse_vx_config_det metatask_PointStat_NDAS_all_mems: var: @@ -128,6 +155,8 @@ metatask_PointStat_NDAS_all_mems: ACCUM_HH: '01' ENSMEM_INDX: "#mem#" SLASH_ENSMEM_SUBDIR_OR_NULL: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% endif %}' + FCST_LEVEL: 'all' + FCST_THRESH: 'all' walltime: 01:00:00 dependency: and: @@ -138,3 +167,6 @@ metatask_PointStat_NDAS_all_mems: attrs: age: 00:00:00:30 text: !cycstr '{{ workflow.EXPTDIR }}/@Y@m@d@H/post_files_exist_mem#mem#.txt' + taskdep_parse_vx_config_det: + attrs: + task: parse_vx_config_det diff --git a/parm/wflow/verify_ens.yaml b/parm/wflow/verify_ens.yaml index cf0a8d1dac..3f7638587d 100644 --- a/parm/wflow/verify_ens.yaml +++ b/parm/wflow/verify_ens.yaml @@ -21,6 +21,18 @@ default_task_verify_ens: &default_task_verify_ens queue: '&QUEUE_DEFAULT;' walltime: 01:00:00 +task_parse_vx_config_ens: + <<: *default_task_verify_ens + command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_PARSE_VX_CONFIG"' + envars: + <<: *default_vars + DET_OR_ENS: 'ens' + join: !cycstr '&LOGDIR;/{{ jobname }}&LOGEXT;' + walltime: 00:05:00 + # No dependencies are needed for this task because as long as any ensemble + # verification tasks are going to be run (i.e. as long as this configuration + # file is included in the workflow), then this task must be launched. 
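
The FCST_THRESH values set on the verification tasks in these workflow files
take the values 'all', 'none', or a specific threshold string.  A rough Python
rendering of the selection rule, assumed from the semantics spelled out in the
check_thresh macro's error message earlier in this patch:

    # 'all' keeps every configured threshold, 'none' keeps none, and any
    # other value must match one of the configured thresholds exactly.
    def select_threshes(valid_threshes, input_thresh):
        if input_thresh == "all":
            return list(valid_threshes)
        if input_thresh == "none":
            return []
        if input_thresh not in valid_threshes:
            raise ValueError(f"{input_thresh!r} not in {valid_threshes}")
        return [input_thresh]

    print(select_threshes(["ge20", "ge30", "ge40", "ge50"], "all"))   # all four
    print(select_threshes(["ge20", "ge30", "ge40", "ge50"], "none"))  # []
    print(select_threshes(["ge20", "ge30", "ge40", "ge50"], "ge30"))  # ['ge30']
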
+ metatask_GenEnsProd_EnsembleStat_CCPA: var: ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' @@ -34,6 +46,8 @@ metatask_GenEnsProd_EnsembleStat_CCPA: VAR: APCP METPLUSTOOLNAME: 'GENENSPROD' OBTYPE: 'CCPA' + FCST_LEVEL: 'A#ACCUM_HH#' + FCST_THRESH: 'all' dependency: and: # The PcpCombine task for obs must be complete because this GenEnsProd @@ -45,11 +59,15 @@ metatask_GenEnsProd_EnsembleStat_CCPA: metataskdep_pcpcombine_fcst: attrs: metatask: PcpCombine_fcst_APCP#ACCUM_HH#h_all_mems + taskdep_parse_vx_config_ens: + attrs: + task: parse_vx_config_ens task_run_MET_EnsembleStat_vx_APCP#ACCUM_HH#h: <<: *task_GenEnsProd_CCPA envars: <<: *envars_GenEnsProd_CCPA METPLUSTOOLNAME: 'ENSEMBLESTAT' + FCST_THRESH: 'none' dependency: taskdep_genensprod: attrs: @@ -68,6 +86,8 @@ metatask_GenEnsProd_EnsembleStat_NOHRSC: VAR: ASNOW METPLUSTOOLNAME: 'GENENSPROD' OBTYPE: 'NOHRSC' + FCST_LEVEL: 'A#ACCUM_HH#' + FCST_THRESH: 'all' dependency: and: # The PcpCombine task for obs must be complete because this GenEnsProd @@ -76,11 +96,15 @@ metatask_GenEnsProd_EnsembleStat_NOHRSC: metataskdep_pcpcombine_fcst: attrs: metatask: PcpCombine_fcst_ASNOW#ACCUM_HH#h_all_mems + taskdep_parse_vx_config_ens: + attrs: + task: parse_vx_config_ens task_run_MET_EnsembleStat_vx_ASNOW#ACCUM_HH#h: <<: *task_GenEnsProd_NOHRSC envars: <<: *envars_GenEnsProd_NOHRSC METPLUSTOOLNAME: 'ENSEMBLESTAT' + FCST_THRESH: 'none' dependency: and: taskdep: @@ -103,24 +127,31 @@ metatask_GenEnsProd_EnsembleStat_MRMS: VAR: '#VAR#' METPLUSTOOLNAME: 'GENENSPROD' OBTYPE: 'MRMS' + FCST_LEVEL: 'L0' + FCST_THRESH: 'all' dependency: and: - taskdep: - attrs: - task: get_obs_mrms - metataskdep_post_files_exist: &post_files_exist + metataskdep_check_post_output: &check_post_output attrs: metatask: check_post_output_all_mems - + taskdep_parse_vx_config_ens: + attrs: + task: parse_vx_config_ens task_run_MET_EnsembleStat_vx_#VAR#: <<: *task_GenEnsProd_MRMS envars: <<: *envars_GenEnsProd_MRMS METPLUSTOOLNAME: 'ENSEMBLESTAT' + FCST_LEVEL: 'L0' + FCST_THRESH: 'none' dependency: - taskdep: - attrs: - task: run_MET_GenEnsProd_vx_#VAR# + and: + taskdep_get_obs_mrms: + attrs: + task: get_obs_mrms + taskdep_genensprod: + attrs: + task: run_MET_GenEnsProd_vx_#VAR# metatask_GenEnsProd_EnsembleStat_NDAS: var: @@ -135,6 +166,8 @@ metatask_GenEnsProd_EnsembleStat_NDAS: METPLUSTOOLNAME: 'GENENSPROD' OBTYPE: 'NDAS' ACCUM_HH: '01' + FCST_LEVEL: 'all' + FCST_THRESH: 'all' walltime: 02:30:00 dependency: and: @@ -144,8 +177,11 @@ metatask_GenEnsProd_EnsembleStat_NDAS: taskdep_pb2nc: attrs: task: run_MET_Pb2nc_obs - metataskdep_post_files_exist: - <<: *post_files_exist + metataskdep_check_post_output: + <<: *check_post_output + taskdep_parse_vx_config_ens: + attrs: + task: parse_vx_config_ens task_run_MET_EnsembleStat_vx_#VAR#: <<: *task_GenEnsProd_NDAS envars: @@ -174,6 +210,8 @@ metatask_GridStat_CCPA_ensmeanprob_all_accums: METPLUSTOOLNAME: 'GRIDSTAT' OBTYPE: 'CCPA' ACCUM_HH: '#ACCUM_HH#' + FCST_LEVEL: 'A#ACCUM_HH#' + FCST_THRESH: 'all' dependency: taskdep: attrs: @@ -196,6 +234,8 @@ metatask_GridStat_NOHRSC_ensmeanprob_all_accums: METPLUSTOOLNAME: 'GRIDSTAT' OBTYPE: 'NOHRSC' ACCUM_HH: '#ACCUM_HH#' + FCST_LEVEL: 'A#ACCUM_HH#' + FCST_THRESH: 'all' dependency: taskdep: attrs: @@ -214,6 +254,8 @@ metatask_GridStat_MRMS_ensprob: VAR: '#VAR#' METPLUSTOOLNAME: 'GRIDSTAT' OBTYPE: 'MRMS' + FCST_LEVEL: 'L0' + FCST_THRESH: 'all' dependency: taskdep: attrs: @@ -236,6 +278,8 @@ 
metatask_PointStat_NDAS_ensmeanprob: METPLUSTOOLNAME: 'POINTSTAT' OBTYPE: 'NDAS' ACCUM_HH: '01' + FCST_LEVEL: 'all' + FCST_THRESH: 'all' dependency: taskdep: attrs: diff --git a/parm/wflow/verify_pre.yaml b/parm/wflow/verify_pre.yaml index eb1a7eb796..b7511bf63f 100644 --- a/parm/wflow/verify_pre.yaml +++ b/parm/wflow/verify_pre.yaml @@ -85,7 +85,7 @@ task_run_MET_Pb2nc_obs: <<: *default_vars VAR: ADPSFC ACCUM_HH: '01' - obs_or_fcst: obs + FCST_OR_OBS: OBS OBTYPE: NDAS OBS_DIR: '&NDAS_OBS_DIR;' METPLUSTOOLNAME: 'PB2NC' @@ -115,7 +115,7 @@ metatask_PcpCombine_obs: <<: *default_vars VAR: APCP ACCUM_HH: '#ACCUM_HH#' - obs_or_fcst: obs + FCST_OR_OBS: OBS OBTYPE: CCPA OBS_DIR: '&CCPA_OBS_DIR;' METPLUSTOOLNAME: 'PCPCOMBINE' @@ -226,7 +226,7 @@ metatask_PcpCombine_fcst_APCP_all_accums_all_mems: <<: *default_vars VAR: APCP ACCUM_HH: '#ACCUM_HH#' - obs_or_fcst: fcst + FCST_OR_OBS: FCST OBTYPE: CCPA OBS_DIR: '&CCPA_OBS_DIR;' METPLUSTOOLNAME: 'PCPCOMBINE' @@ -254,7 +254,7 @@ metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems: <<: *default_vars VAR: ASNOW ACCUM_HH: '#ACCUM_HH#' - obs_or_fcst: fcst + FCST_OR_OBS: FCST OBTYPE: NOHRSC OBS_DIR: '&NOHRSC_OBS_DIR;' METPLUSTOOLNAME: 'PCPCOMBINE' diff --git a/scripts/exregional_check_post_output.sh b/scripts/exregional_check_post_output.sh index 1352d38789..ba0d141c5d 100755 --- a/scripts/exregional_check_post_output.sh +++ b/scripts/exregional_check_post_output.sh @@ -50,10 +50,11 @@ print_info_msg " Entering script: \"${scrfunc_fn}\" In directory: \"${scrfunc_dir}\" -This is the ex-script for the task that checks that all the post-processed -output files in fact exist and are at least a certain age. These files -may have been generated by UPP as part of the current SRW App workflow, -or they may be user-staged. +This is the ex-script for the task that checks that no more than +NUM_MISSING_FCST_FILES_MAX of each forecast's (ensemble member's) post- +processed output files are missing. Note that such files may have been +generated by UPP as part of the current SRW App workflow, or they may be +user-staged. ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_parse_vx_config.sh b/scripts/exregional_parse_vx_config.sh new file mode 100755 index 0000000000..13632c7e53 --- /dev/null +++ b/scripts/exregional_parse_vx_config.sh @@ -0,0 +1,94 @@ +#!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# Source the variable definitions file and the bash utility functions. +# +#----------------------------------------------------------------------- +# +. $USHdir/source_util_funcs.sh +source_config_for_task "task_run_met_pcpcombine|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +# +#----------------------------------------------------------------------- +# +# Source files defining auxiliary functions for verification. +# +#----------------------------------------------------------------------- +# +. $USHdir/set_vx_fhr_list.sh +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# +{ save_shell_opts; . 
$USHdir/preamble.sh; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " +======================================================================== +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + +This is the ex-script for the task that reads in the \"coupled\" yaml +verification (vx) configuration file (python dictionary) and generates +from it two \"decoupled\" vx configuration dictionaries, one for forecasts +and another for observations. The task then writes these two decoupled +dictionaries to a new configuration file in the experiment directory +that can be read by downstream vx tasks. +========================================================================" +# +#----------------------------------------------------------------------- +# +# Call python script to generate vx configuration file containing +# separate vx configuration dictionaries for forecasts and observations. +# +#----------------------------------------------------------------------- +# +python3 ${USHdir}/metplus/decouple_fcst_obs_vx_config.py \ + --vx_type "${DET_OR_ENS}" \ + --outfile_type "txt" \ + --outdir "${EXPTDIR}" +# +#----------------------------------------------------------------------- +# +# Print message indicating successful completion of script. +# +#----------------------------------------------------------------------- +# +print_info_msg " +======================================================================== +Done extracting vx configuration. + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +========================================================================" +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# +{ restore_shell_opts; } > /dev/null 2>&1 diff --git a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh index aa24abbb10..529d8d92cc 100755 --- a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh +++ b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh @@ -114,53 +114,6 @@ set_vx_params \ # #----------------------------------------------------------------------- # -# Set additional field-dependent verification parameters. 
-# -#----------------------------------------------------------------------- -# -if [ "${grid_or_point}" = "grid" ]; then - - case "${FIELDNAME_IN_MET_FILEDIR_NAMES}" in - "APCP01h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge2.54" - ;; - "APCP03h") - FIELD_THRESHOLDS="gt0.0, ge0.508, ge2.54, ge6.350" - ;; - "APCP06h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge6.350, ge12.700" - ;; - "APCP24h") - FIELD_THRESHOLDS="gt0.0, ge6.350, ge12.700, ge25.400" - ;; - "ASNOW06h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "ASNOW24h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "REFC") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - "RETOP") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - *) - print_err_msg_exit "\ -Verification parameters have not been defined for this field -(FIELDNAME_IN_MET_FILEDIR_NAMES): - FIELDNAME_IN_MET_FILEDIR_NAMES = \"${FIELDNAME_IN_MET_FILEDIR_NAMES}\"" - ;; - esac - -elif [ "${grid_or_point}" = "point" ]; then - - FIELD_THRESHOLDS="" - -fi -# -#----------------------------------------------------------------------- -# # Set paths and file templates for input to and output from the MET/ # METplus tool to be run as well as other file/directory parameters. # @@ -233,13 +186,10 @@ for (( i=0; i<${NUM_ENS_MEMBERS}; i++ )); do template="${FCST_SUBDIR_TEMPLATE}/${FCST_FN_TEMPLATE}" fi - slash_ensmem_subdir_or_null="/${ensmem_name}" if [ -z "${FCST_INPUT_FN_TEMPLATE}" ]; then FCST_INPUT_FN_TEMPLATE="$(eval echo ${template})" else - FCST_INPUT_FN_TEMPLATE="\ -${FCST_INPUT_FN_TEMPLATE}, -$(eval echo ${template})" + FCST_INPUT_FN_TEMPLATE="${FCST_INPUT_FN_TEMPLATE}, $(eval echo ${template})" fi done @@ -251,6 +201,16 @@ STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}" #----------------------------------------------------------------------- # # Set the array of forecast hours for which to run the MET/METplus tool. +# This is done by starting with the full list of forecast hours for which +# there is forecast output and then removing from that list any forecast +# hours for which there is no corresponding observation data. +# +# Note that strictly speaking, this does not need to be done if the MET/ +# METplus tool being called is GenEnsProd (because this tool only operates +# on forecasts), but we run the check anyway in this case in order to +# keep the code here simpler and because the output of GenEnsProd for +# forecast hours with missing observations will not be used anyway in +# downstream verification tasks. # #----------------------------------------------------------------------- # @@ -318,16 +278,27 @@ fi # # First, set the base file names. # -metplus_config_tmpl_fn="${VAR}" -metplus_config_tmpl_fn="${MetplusToolName}_${metplus_config_tmpl_fn}" -metplus_config_fn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}" -metplus_log_fn="${metplus_config_fn}" +metplus_config_tmpl_bn="${MetplusToolName}" +metplus_config_bn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}" +metplus_log_bn="${metplus_config_bn}" # # Add prefixes and suffixes (extensions) to the base file names. 
# -metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf" -metplus_config_fn="${metplus_config_fn}.conf" -metplus_log_fn="metplus.log.${metplus_log_fn}" +metplus_config_tmpl_fn="${metplus_config_tmpl_bn}.conf" +metplus_config_fn="${metplus_config_bn}.conf" +metplus_log_fn="metplus.log.${metplus_log_bn}" +# +#----------------------------------------------------------------------- +# +# Load the yaml-like file containing the configuration for ensemble +# verification. +# +#----------------------------------------------------------------------- +# +det_or_ens="ens" +vx_config_output_fn="vx_config_${det_or_ens}.txt" +vx_config_output_fp="${EXPTDIR}/${vx_config_output_fn}" +vx_config_dict=$(<"${vx_config_output_fp}") # #----------------------------------------------------------------------- # @@ -387,20 +358,22 @@ settings="\ 'obtype': '${OBTYPE}' 'accum_hh': '${ACCUM_HH:-}' 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' + 'metplus_templates_dir': '${METPLUS_CONF:-}' + 'input_field_group': '${VAR:-}' + 'input_level_fcst': '${FCST_LEVEL:-}' + 'input_thresh_fcst': '${FCST_THRESH:-}' + 'vx_config_dict': ${vx_config_dict:-} " # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ --verbose \ - --values-file "${tmpfile}" + --values-file "${tmpfile}" \ + --search-path "/" err=$? rm $tmpfile diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh index 93444069cb..b8f0c49fec 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh @@ -125,53 +125,6 @@ time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" ) # #----------------------------------------------------------------------- # -# Set additional field-dependent verification parameters. -# -#----------------------------------------------------------------------- -# -if [ "${grid_or_point}" = "grid" ]; then - - case "${FIELDNAME_IN_MET_FILEDIR_NAMES}" in - "APCP01h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge1.27, ge2.54" - ;; - "APCP03h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge1.27, ge2.54, ge3.810, ge6.350" - ;; - "APCP06h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge1.27, ge2.54, ge3.810, ge6.350, ge8.890, ge12.700" - ;; - "APCP24h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge1.27, ge2.54, ge3.810, ge6.350, ge8.890, ge12.700, ge25.400" - ;; - "ASNOW06h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "ASNOW24h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "REFC") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - "RETOP") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - *) - print_err_msg_exit "\ -Verification parameters have not been defined for this field -(FIELDNAME_IN_MET_FILEDIR_NAMES): - FIELDNAME_IN_MET_FILEDIR_NAMES = \"${FIELDNAME_IN_MET_FILEDIR_NAMES}\"" - ;; - esac - -elif [ "${grid_or_point}" = "point" ]; then - - FIELD_THRESHOLDS="" - -fi -# -#----------------------------------------------------------------------- -# # Set paths and file templates for input to and output from the MET/ # METplus tool to be run as well as other file/directory parameters. 
# @@ -252,6 +205,9 @@ STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}" #----------------------------------------------------------------------- # # Set the array of forecast hours for which to run the MET/METplus tool. +# This is done by starting with the full list of forecast hours for which +# there is forecast output and then removing from that list any forecast +# hours for which there is no corresponding observation data. # #----------------------------------------------------------------------- # @@ -319,16 +275,27 @@ fi # # First, set the base file names. # -metplus_config_tmpl_fn="${VAR}" -metplus_config_tmpl_fn="${MetplusToolName}_${metplus_config_tmpl_fn}" -metplus_config_fn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}_${ensmem_name}" -metplus_log_fn="${metplus_config_fn}" +metplus_config_tmpl_bn="GridStat_or_PointStat" +metplus_config_bn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}_${ensmem_name}" +metplus_log_bn="${metplus_config_bn}" # # Add prefixes and suffixes (extensions) to the base file names. # -metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf" -metplus_config_fn="${metplus_config_fn}.conf" -metplus_log_fn="metplus.log.${metplus_log_fn}" +metplus_config_tmpl_fn="${metplus_config_tmpl_bn}.conf" +metplus_config_fn="${metplus_config_bn}.conf" +metplus_log_fn="metplus.log.${metplus_log_bn}" +# +#----------------------------------------------------------------------- +# +# Load the yaml-like file containing the configuration for deterministic +# verification. +# +#----------------------------------------------------------------------- +# +det_or_ens="det" +vx_config_output_fn="vx_config_${det_or_ens}.txt" +vx_config_output_fp="${EXPTDIR}/${vx_config_output_fn}" +vx_config_dict=$(<"${vx_config_output_fp}") # #----------------------------------------------------------------------- # @@ -388,20 +355,22 @@ settings="\ 'obtype': '${OBTYPE}' 'accum_hh': '${ACCUM_HH:-}' 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' + 'metplus_templates_dir': '${METPLUS_CONF:-}' + 'input_field_group': '${VAR:-}' + 'input_level_fcst': '${FCST_LEVEL:-}' + 'input_thresh_fcst': '${FCST_THRESH:-}' + 'vx_config_dict': ${vx_config_dict:-} " # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ --verbose \ - --values-file "${tmpfile}" + --values-file "${tmpfile}" \ + --search-path "/" err=$? rm $tmpfile @@ -415,7 +384,6 @@ $settings" print_err_msg_exit "${message_txt}" fi fi - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh index 4b9716493e..9939daaf76 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh @@ -103,53 +103,6 @@ set_vx_params \ # #----------------------------------------------------------------------- # -# Set additional field-dependent verification parameters. 
-# -#----------------------------------------------------------------------- -# -if [ "${grid_or_point}" = "grid" ]; then - - case "${FIELDNAME_IN_MET_FILEDIR_NAMES}" in - "APCP01h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge2.54" - ;; - "APCP03h") - FIELD_THRESHOLDS="gt0.0, ge0.508, ge2.54, ge6.350" - ;; - "APCP06h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge6.350, ge12.700" - ;; - "APCP24h") - FIELD_THRESHOLDS="gt0.0, ge6.350, ge12.700, ge25.400" - ;; - "ASNOW06h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "ASNOW24h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "REFC") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - "RETOP") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - *) - print_err_msg_exit "\ -Verification parameters have not been defined for this field -(FIELDNAME_IN_MET_FILEDIR_NAMES): - FIELDNAME_IN_MET_FILEDIR_NAMES = \"${FIELDNAME_IN_MET_FILEDIR_NAMES}\"" - ;; - esac - -elif [ "${grid_or_point}" = "point" ]; then - - FIELD_THRESHOLDS="" - -fi -# -#----------------------------------------------------------------------- -# # Set paths and file templates for input to and output from the MET/ # METplus tool to be run as well as other file/directory parameters. # @@ -201,6 +154,9 @@ STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}_ensmean" #----------------------------------------------------------------------- # # Set the array of forecast hours for which to run the MET/METplus tool. +# This is done by starting with the full list of forecast hours for which +# there is forecast output and then removing from that list any forecast +# hours for which there is no corresponding observation data. # #----------------------------------------------------------------------- # @@ -277,16 +233,27 @@ fi # # First, set the base file names. # -metplus_config_tmpl_fn="${VAR}" -metplus_config_tmpl_fn="${MetplusToolName}_ensmean_${metplus_config_tmpl_fn}" -metplus_config_fn="${MetplusToolName}_ensmean_${FIELDNAME_IN_MET_FILEDIR_NAMES}" -metplus_log_fn="${metplus_config_fn}" +metplus_config_tmpl_bn="${MetplusToolName}_ensmean" +metplus_config_bn="${MetplusToolName}_ensmean_${FIELDNAME_IN_MET_FILEDIR_NAMES}" +metplus_log_bn="${metplus_config_bn}" # # Add prefixes and suffixes (extensions) to the base file names. # -metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf" -metplus_config_fn="${metplus_config_fn}.conf" -metplus_log_fn="metplus.log.${metplus_log_fn}" +metplus_config_tmpl_fn="${metplus_config_tmpl_bn}.conf" +metplus_config_fn="${metplus_config_bn}.conf" +metplus_log_fn="metplus.log.${metplus_log_bn}" +# +#----------------------------------------------------------------------- +# +# Load the yaml-like file containing the configuration for ensemble +# verification. 
+# +#----------------------------------------------------------------------- +# +det_or_ens="ens" +vx_config_output_fn="vx_config_${det_or_ens}.txt" +vx_config_output_fp="${EXPTDIR}/${vx_config_output_fn}" +vx_config_dict=$(<"${vx_config_output_fp}") # #----------------------------------------------------------------------- # @@ -346,20 +313,22 @@ settings="\ 'obtype': '${OBTYPE}' 'accum_hh': '${ACCUM_HH:-}' 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' + 'metplus_templates_dir': '${METPLUS_CONF:-}' + 'input_field_group': '${VAR:-}' + 'input_level_fcst': '${FCST_LEVEL:-}' + 'input_thresh_fcst': '${FCST_THRESH:-}' + 'vx_config_dict': ${vx_config_dict:-} " # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ --verbose \ - --values-file "${tmpfile}" + --values-file "${tmpfile}" \ + --search-path "/" err=$? rm $tmpfile @@ -373,7 +342,6 @@ $settings" print_err_msg_exit "${message_txt}" fi fi - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh index 918fb900d3..33d00b1d37 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh @@ -153,6 +153,9 @@ STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}_ensprob" #----------------------------------------------------------------------- # # Set the array of forecast hours for which to run the MET/METplus tool. +# This is done by starting with the full list of forecast hours for which +# there is forecast output and then removing from that list any forecast +# hours for which there is no corresponding observation data. # #----------------------------------------------------------------------- # @@ -229,16 +232,27 @@ fi # # First, set the base file names. # -metplus_config_tmpl_fn="${VAR}" -metplus_config_tmpl_fn="${MetplusToolName}_ensprob_${metplus_config_tmpl_fn}" -metplus_config_fn="${MetplusToolName}_ensprob_${FIELDNAME_IN_MET_FILEDIR_NAMES}" -metplus_log_fn="${metplus_config_fn}" +metplus_config_tmpl_bn="${MetplusToolName}_ensprob" +metplus_config_bn="${MetplusToolName}_ensprob_${FIELDNAME_IN_MET_FILEDIR_NAMES}" +metplus_log_bn="${metplus_config_bn}" # # Add prefixes and suffixes (extensions) to the base file names. # -metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf" -metplus_config_fn="${metplus_config_fn}.conf" -metplus_log_fn="metplus.log.${metplus_log_fn}" +metplus_config_tmpl_fn="${metplus_config_tmpl_bn}.conf" +metplus_config_fn="${metplus_config_bn}.conf" +metplus_log_fn="metplus.log.${metplus_log_bn}" +# +#----------------------------------------------------------------------- +# +# Load the yaml-like file containing the configuration for ensemble +# verification. 
+# +#----------------------------------------------------------------------- +# +det_or_ens="ens" +vx_config_output_fn="vx_config_${det_or_ens}.txt" +vx_config_output_fp="${EXPTDIR}/${vx_config_output_fn}" +vx_config_dict=$(<"${vx_config_output_fp}") # #----------------------------------------------------------------------- # @@ -298,20 +312,22 @@ settings="\ 'obtype': '${OBTYPE}' 'accum_hh': '${ACCUM_HH:-}' 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' + 'metplus_templates_dir': '${METPLUS_CONF:-}' + 'input_field_group': '${VAR:-}' + 'input_level_fcst': '${FCST_LEVEL:-}' + 'input_thresh_fcst': '${FCST_THRESH:-}' + 'vx_config_dict': ${vx_config_dict:-} " # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ --verbose \ - --values-file "${tmpfile}" + --values-file "${tmpfile}" \ + --search-path "/" err=$? rm $tmpfile @@ -325,8 +341,6 @@ $settings" print_err_msg_exit "${message_txt}" fi fi - - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh index 2528c32ced..985cd33c7f 100755 --- a/scripts/exregional_run_met_pb2nc_obs.sh +++ b/scripts/exregional_run_met_pb2nc_obs.sh @@ -272,20 +272,17 @@ settings="\ 'obtype': '${OBTYPE}' 'accum_hh': '${ACCUM_HH:-}' 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' " # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ --verbose \ - --values-file "${tmpfile}" + --values-file "${tmpfile}" \ + --search-path "/" err=$? 
rm $tmpfile @@ -299,7 +296,6 @@ $settings" print_err_msg_exit "${message_txt}" fi fi - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh index fb495a6145..6e64d102e6 100755 --- a/scripts/exregional_run_met_pcpcombine.sh +++ b/scripts/exregional_run_met_pcpcombine.sh @@ -120,7 +120,7 @@ set_vx_params \ #----------------------------------------------------------------------- # time_lag="0" -if [ "${obs_or_fcst}" = "fcst" ]; then +if [ "${FCST_OR_OBS}" = "FCST" ]; then i="0" if [ "${DO_ENSEMBLE}" = "TRUE" ]; then i=$( bc -l <<< "${ENSMEM_INDX}-1" ) @@ -137,10 +137,9 @@ fi # vx_fcst_input_basedir=$( eval echo "${VX_FCST_INPUT_BASEDIR}" ) vx_output_basedir=$( eval echo "${VX_OUTPUT_BASEDIR}" ) -if [ "${obs_or_fcst}" = "fcst" ]; then +if [ "${FCST_OR_OBS}" = "FCST" ]; then ensmem_indx=$(printf "%0${VX_NDIGITS_ENSMEM_NAMES}d" $(( 10#${ENSMEM_INDX}))) ensmem_name="mem${ensmem_indx}" - if [ "${RUN_ENVIR}" = "nco" ]; then slash_cdate_or_null="" slash_ensmem_subdir_or_null="" @@ -171,17 +170,7 @@ OBS_INPUT_FN_TEMPLATE="" FCST_INPUT_DIR="" FCST_INPUT_FN_TEMPLATE="" -if [ "${obs_or_fcst}" = "obs" ]; then - - OBS_INPUT_DIR="${OBS_DIR}" - OBS_INPUT_FN_TEMPLATE=$( eval echo ${OBS_CCPA_APCP_FN_TEMPLATE} ) - - OUTPUT_BASE="${vx_output_basedir}" - OUTPUT_DIR="${OUTPUT_BASE}/metprd/${MetplusToolName}_obs" - OUTPUT_FN_TEMPLATE=$( eval echo ${OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT} ) - STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}" - -elif [ "${obs_or_fcst}" = "fcst" ]; then +if [ "${FCST_OR_OBS}" = "FCST" ]; then FCST_INPUT_DIR="${vx_fcst_input_basedir}" FCST_INPUT_FN_TEMPLATE=$( eval echo ${FCST_SUBDIR_TEMPLATE:+${FCST_SUBDIR_TEMPLATE}/}${FCST_FN_TEMPLATE} ) @@ -191,22 +180,36 @@ elif [ "${obs_or_fcst}" = "fcst" ]; then OUTPUT_FN_TEMPLATE=$( eval echo ${FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT} ) STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}" +elif [ "${FCST_OR_OBS}" = "OBS" ]; then + + OBS_INPUT_DIR="${OBS_DIR}" + OBS_INPUT_FN_TEMPLATE=$( eval echo ${OBS_CCPA_APCP_FN_TEMPLATE} ) + + OUTPUT_BASE="${vx_output_basedir}" + OUTPUT_DIR="${OUTPUT_BASE}/metprd/${MetplusToolName}_obs" + OUTPUT_FN_TEMPLATE=$( eval echo ${OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT} ) + STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}" + fi # #----------------------------------------------------------------------- # # Set the array of forecast hours for which to run the MET/METplus tool. +# This is done by starting with the full list of forecast hours for which +# there is forecast output and then removing from that list any forecast +# hours for which there is no corresponding observation data (if combining +# observed APCP) or forecast data (if combining forecast APCP). # #----------------------------------------------------------------------- # -if [ "${obs_or_fcst}" = "obs" ]; then - base_dir="${OBS_INPUT_DIR}" - fn_template="${OBS_INPUT_FN_TEMPLATE}" - num_missing_files_max="${NUM_MISSING_OBS_FILES_MAX}" -elif [ "${obs_or_fcst}" = "fcst" ]; then +if [ "${FCST_OR_OBS}" = "FCST" ]; then base_dir="${FCST_INPUT_DIR}" fn_template="${FCST_INPUT_FN_TEMPLATE}" num_missing_files_max="${NUM_MISSING_FCST_FILES_MAX}" +elif [ "${FCST_OR_OBS}" = "OBS" ]; then + base_dir="${OBS_INPUT_DIR}" + fn_template="${OBS_INPUT_FN_TEMPLATE}" + num_missing_files_max="${NUM_MISSING_OBS_FILES_MAX}" fi set_vx_fhr_list \ @@ -273,8 +276,8 @@ fi # # First, set the base file names. 
# -metplus_config_tmpl_fn="${MetplusToolName}_${obs_or_fcst}" -metplus_config_fn="${metplus_config_tmpl_fn}_${FIELDNAME_IN_MET_FILEDIR_NAMES}${ENSMEM_INDX:+_${ensmem_name}}" +metplus_config_tmpl_fn="${MetplusToolName}" +metplus_config_fn="${metplus_config_tmpl_fn}_$(echo_lowercase ${FCST_OR_OBS})_${FIELDNAME_IN_MET_FILEDIR_NAMES}${ENSMEM_INDX:+_${ensmem_name}}" metplus_log_fn="${metplus_config_fn}_$CDATE" # # If operating on observation files, append the cycle date to the name @@ -283,13 +286,13 @@ metplus_log_fn="${metplus_config_fn}_$CDATE" # necessary to associate the configuration file with the cycle for which # it is used). # -if [ "${obs_or_fcst}" = "obs" ]; then +if [ "${FCST_OR_OBS}" = "OBS" ]; then metplus_config_fn="${metplus_log_fn}" fi # # Add prefixes and suffixes (extensions) to the base file names. # -metplus_config_tmpl_fn="${metplus_config_tmpl_fn}_${field}.conf" +metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf" metplus_config_fn="${metplus_config_fn}.conf" metplus_log_fn="metplus.log.${metplus_log_fn}" # @@ -326,10 +329,8 @@ settings="\ # 'metplus_config_fn': '${metplus_config_fn:-}' 'metplus_log_fn': '${metplus_log_fn:-}' - 'obs_input_dir': '${OBS_INPUT_DIR:-}' - 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' - 'fcst_input_dir': '${FCST_INPUT_DIR:-}' - 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' + 'input_dir': '${FCST_INPUT_DIR:-${OBS_INPUT_DIR}}' + 'input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-${OBS_INPUT_FN_TEMPLATE}}' 'output_base': '${OUTPUT_BASE}' 'output_dir': '${OUTPUT_DIR}' 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' @@ -349,21 +350,24 @@ settings="\ 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' 'obtype': '${OBTYPE}' + 'FCST_OR_OBS': '${FCST_OR_OBS}' 'accum_hh': '${ACCUM_HH:-}' 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' + 'metplus_templates_dir': '${METPLUS_CONF:-}' + 'input_field_group': '${VAR:-}' + 'input_level_fcst': '${FCST_LEVEL:-}' + 'input_thresh_fcst': '${FCST_THRESH:-}' " + # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ --verbose \ - --values-file "${tmpfile}" + --values-file "${tmpfile}" \ + --search-path "/" err=$? rm $tmpfile diff --git a/ush/metplus/decouple_fcst_obs_vx_config.py b/ush/metplus/decouple_fcst_obs_vx_config.py new file mode 100755 index 0000000000..afa001859c --- /dev/null +++ b/ush/metplus/decouple_fcst_obs_vx_config.py @@ -0,0 +1,436 @@ +#!/usr/bin/env python3 + +import os +import sys +import glob +import argparse +import yaml + +import logging +import textwrap +from textwrap import indent, dedent + +import pprint +import subprocess + +from pathlib import Path +file = Path(__file__).resolve() +home_dir = file.parents[2] +ush_dir = Path(os.path.join(home_dir, 'ush')).resolve() +sys.path.append(str(ush_dir)) + +from python_utils import ( + log_info, + load_config_file, +) + + +def get_pprint_str(var, indent_str=''): + """ + Function to format a python variable as a pretty-printed string and add + indentation. + + Arguments: + --------- + var: + A variable. + + indent_str: + String to be added to the beginning of each line of the pretty-printed + form of var. This usually consists of multiple space characters. 
+
+    Returns:
+    -------
+    var_str:
+        Formatted string containing contents of variable.
+    """
+
+    var_str = pprint.pformat(var, compact=True, sort_dicts=False)
+    var_str = var_str.splitlines(True)
+    var_str = [indent_str + s for s in var_str]
+    var_str = ''.join(var_str)
+
+    return var_str
+
+
+def create_pprinted_msg(vars_dict, indent_str='', add_nl_after_varname=False):
+    """
+    Function to create an output message (string) containing one or more
+    variables' names, with each name followed possibly by a newline, an equal
+    sign, and the pretty-printed value of the variable.  Each variable name
+    starts on a new line.
+
+    Arguments:
+    ---------
+    vars_dict:
+        Dictionary containing the variable names (the keys) and their values
+        (the values).
+
+    indent_str:
+        String to be added to the beginning of each line of the string before
+        returning it.  This usually consists of multiple space characters.
+
+    add_nl_after_varname:
+        Flag indicating whether to add a newline after the variable name (and
+        before the equal sign).
+
+    Returns:
+    -------
+    vars_str:
+        Formatted string containing contents of variable.
+    """
+
+    space_or_nl = ' '
+    one_or_zero = 1
+    if add_nl_after_varname:
+        space_or_nl = '\n'
+        one_or_zero = 0
+
+    vars_str = ''
+    for var_name, var_value in vars_dict.items():
+        pprint_indent_str = ' '*(2 + one_or_zero*(1 + len(var_name)))
+        tmp = f'{var_name}' + space_or_nl + '= ' + \
+              get_pprint_str(var_value, pprint_indent_str).lstrip()
+        vars_str = '\n'.join([vars_str, tmp])
+
+    vars_str = indent(vars_str, indent_str)
+
+    return vars_str
+
+
+def extract_fcst_obs_vals_from_cpld(item_cpld):
+    """
+    Function to parse the "coupled" value of an item (obtained from the coupled
+    verification (vx) configuration dictionary) to extract from it the item's
+    value for forecasts and its value for observations.  The coupled item
+    (item_cpld) is a string that may correspond to a field name, a level, or
+    a threshold.  If item_cpld has the form
+
+        item_cpld = str1 + delim_str + str2
+
+    where delim_str is a delimiter string (e.g. delim_str may be set to '%%'),
+    then the forecast and observation values of the item are given by
+
+        item_fcst = str1
+        item_obs = str2
+
+    For example, if delim_str = '%%' and
+
+        item_cpld = 'ABCD%%EFGH'
+
+    then
+
+        item_fcst = 'ABCD'
+        item_obs = 'EFGH'
+
+    Alternatively, if delim_str is not a substring within item_cpld, both
+    return values will be identical to the input.
+
+    Arguments:
+    ---------
+    item_cpld:
+        String representing a "coupled" item (field name, level, or threshold),
+        containing both the item's forecast value and its observation value.
+
+    Returns:
+    -------
+    item_fcst, item_obs:
+        Strings containing the values of the item for forecasts and observations,
+        respectively.
+    """
+
+    # Set the delimiter string.
+    delim_str = '%%'
+
+    # Parse the string containing the coupled value of the item to extract
+    # its forecast and observation values.
+    if delim_str in item_cpld:
+        if item_cpld.count(delim_str) == 1:
+            item_fcst, item_obs = item_cpld.split(delim_str)
+        else:
+            msg = dedent(f"""
+                The delimiter string (delim_str) appears more than once in the current
+                coupled item value (item_cpld):
+                  delim_str = {get_pprint_str(delim_str)}
+                  item_cpld = {get_pprint_str(item_cpld)}
+                Stopping.
+ """) + logging.error(msg) + raise ValueError(msg) + else: + item_fcst = item_cpld + item_obs = item_cpld + + return item_fcst, item_obs + + +def decouple_fcst_obs_vx_config(vx_type, outfile_type, outdir='./', log_lvl='info', log_fp=''): + """ + This function reads from a yaml configuration file the coupled verification + (vx) configuration dictionary and parses it (i.e. decouples its contents) + to produce two new configuration dictionaries -- one for forecasts and + another for observations. Here, by "coupled" dictionary, we mean one that + contains items (keys and values) that store the forecast and observation + values for various quantities (field names, levels, and thresholds) in + combined/coupled form. (See the documentation for the function + extract_fcst_obs_vals_from_cpld() for more details of this coupled form.) + This function then writes the two separate (decoupled) vx configuration + dictionaries (one for forecasts and the other for observations) to a file. + + Arguments: + --------- + vx_type: + Type of verification for which the coupled dictionary to be read in + applies. This can be 'det' (for deterministic verification) or 'ens' + (for ensemble verification). + outfile_type: + Type of the output file. This can be 'txt' (for the output to be saved + in a pretty-printed text file) or 'yaml' (for the output to be saved in + a yaml-formatted file. Here, the "output" consists of the two separate + vx configuration files (one for forecasts and another for observations). + outdir: + The directory in which to save the output file. + log_lvl: + The logging level to use. + log_fp: + Path to the log file. Default is an empty string, so that logging output + is sent to stdout. + + Returns: + ------- + None + """ + + # Set up logging. + log_level = str.upper(log_lvl) + fmt = "[%(levelname)s:%(name)s: %(filename)s, line %(lineno)s: %(funcName)s()] %(message)s" + if log_fp: + logging.basicConfig(level=log_level, format=fmt, filename=log_fp, filemode='w') + else: + logging.basicConfig(level=log_level, format=fmt) + logging.basicConfig(level=log_level) + + # Load the yaml file containing the coupled forecast-and-observations + # verification (vx) configuration dictionary. + metplus_conf_dir = Path(os.path.join(home_dir, 'parm', 'metplus')).resolve() + config_fn = ''.join(['vx_config_', vx_type, '.yaml']) + config_fp = Path(os.path.join(metplus_conf_dir, config_fn)).resolve() + fgs_fields_levels_threshes_cpld = load_config_file(config_fp) + + msg = create_pprinted_msg( + vars_dict = {'fgs_fields_levels_threshes_cpld': fgs_fields_levels_threshes_cpld}, + indent_str = ' '*0, + add_nl_after_varname = True) + logging.debug(msg) + + # Loop through the field groups in the coupled vx configuration dictionary + # and generate two separate vx configuration dictionaries, one for forecasts + # and another for observations. + fgs_fields_levels_threshes_fcst = {} + fgs_fields_levels_threshes_obs = {} + indent_incr = 4 + indent_size = indent_incr + indent_str = ' '*indent_size + for field_group, fields_levels_threshes_cpld in fgs_fields_levels_threshes_cpld.items(): + + msg = create_pprinted_msg( + vars_dict = {'field_group': field_group}, + indent_str = indent_str) + logging.debug(msg) + + # Loop over the field names associated with the current field group. 
+ # + # Note that the following variables have to be lists of dictionaries + # (where each dictionary contains only one key-value pair) instead of + # dictionaries because the field names might be repeated and thus cannot + # be used as dictionary keys. For example, in the ADPSFC field group, + # the forecast fields CRAIN, CSNOW, CFRZR, and CICEP all have the + # corresponding observation field PRWE but with different thresholds, + # so although fields_levels_threshes_fcst could be a dictionary with + # CRAIN, CSNOW, CFRZR, and CICEP as keys, fields_levels_threshes_obs + # cannot be a dictionary because the string PRWE cannot be used as a key + # more than once. + fields_levels_threshes_fcst = [] + fields_levels_threshes_obs = [] + indent_size += indent_incr + indent_str = ' '*indent_size + for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items(): + + msg = create_pprinted_msg( + vars_dict = {'field_cpld': field_cpld}, + indent_str = indent_str) + logging.debug(msg) + + # Parse the current coupled field name to extract the forecast and + # observation field names. + field_fcst, field_obs = extract_fcst_obs_vals_from_cpld(field_cpld) + + msg = create_pprinted_msg( + vars_dict = {'field_fcst': field_fcst, 'field_obs': field_obs}, + indent_str = indent_str) + logging.debug(msg) + + # Loop over the levels associated with the current field. + levels_threshes_fcst = {} + levels_threshes_obs = {} + indent_size += indent_incr + indent_str = ' '*indent_size + for level_cpld, threshes_cpld in levels_threshes_cpld.items(): + + msg = create_pprinted_msg( + vars_dict = {'level_cpld': level_cpld}, + indent_str = indent_str) + logging.debug(msg) + + # Parse the current coupled level to extract the forecast and observation + # levels. + level_fcst, level_obs = extract_fcst_obs_vals_from_cpld(level_cpld) + + msg = create_pprinted_msg( + vars_dict = {'level_fcst': level_fcst, 'level_obs': level_obs}, + indent_str = indent_str) + logging.debug(msg) + + # Loop over the thresholds associated with the current level. + threshes_fcst = [] + threshes_obs = [] + indent_size += indent_incr + indent_str = ' '*indent_size + for thresh_cpld in threshes_cpld: + + msg = create_pprinted_msg( + vars_dict = {'thresh_cpld': thresh_cpld}, + indent_str = indent_str) + logging.debug(msg) + + # Parse the current coupled threshold to extract the forecast and + # observation thresholds. 
+ thresh_fcst, thresh_obs = extract_fcst_obs_vals_from_cpld(thresh_cpld) + + msg = create_pprinted_msg( + vars_dict = {'thresh_fcst': thresh_fcst, 'thresh_obs': thresh_obs}, + indent_str = indent_str) + logging.debug(msg) + + threshes_fcst.append(thresh_fcst) + threshes_obs.append(thresh_obs) + + indent_size -= indent_incr + indent_str = ' '*indent_size + msg = create_pprinted_msg( + vars_dict = {'threshes_fcst': threshes_fcst, + 'threshes_obs': threshes_obs}, + indent_str = indent_str, + add_nl_after_varname = True) + logging.debug(msg) + + levels_threshes_fcst[level_fcst] = threshes_fcst + levels_threshes_obs[level_obs] = threshes_obs + + indent_size -= indent_incr + indent_str = ' '*indent_size + msg = create_pprinted_msg( + vars_dict = {'levels_threshes_fcst': levels_threshes_fcst, + 'levels_threshes_obs': levels_threshes_obs}, + indent_str = indent_str, + add_nl_after_varname = True) + logging.debug(msg) + + fields_levels_threshes_fcst.append({field_fcst: levels_threshes_fcst}) + fields_levels_threshes_obs.append({field_obs: levels_threshes_obs}) + + indent_size -= indent_incr + indent_str = ' '*indent_size + msg = create_pprinted_msg( + vars_dict = {'fields_levels_threshes_fcst': fields_levels_threshes_fcst, + 'fields_levels_threshes_obs': fields_levels_threshes_obs}, + indent_str = indent_str, + add_nl_after_varname = True) + logging.debug(msg) + + fgs_fields_levels_threshes_fcst[field_group] = fields_levels_threshes_fcst + fgs_fields_levels_threshes_obs[field_group] = fields_levels_threshes_obs + + indent_size -= indent_incr + indent_str = ' '*indent_size + msg = create_pprinted_msg( + vars_dict = {'fgs_fields_levels_threshes_fcst': fgs_fields_levels_threshes_fcst, + 'fgs_fields_levels_threshes_obs': fgs_fields_levels_threshes_obs}, + indent_str = indent_str, + add_nl_after_varname = True) + logging.debug(msg) + + # We now have a verification configuration dictionary for forecasts and + # a separate one for the observations. To conveniently write these to a + # file, first place (wrap) them in a higher-level dictionary. + vx_config_dict = {'fcst': fgs_fields_levels_threshes_fcst, + 'obs': fgs_fields_levels_threshes_obs} + + # Write the contents of the higher-level dictionary to file. + output_fn = ''.join(['vx_config_', vx_type, '.', outfile_type]) + output_fp = Path(os.path.join(outdir, output_fn)).resolve() + with open(f'{output_fp}', 'w') as fn: + if outfile_type == 'txt': + dict_to_str = get_pprint_str(vx_config_dict, ' ') + fn.write(dict_to_str) + elif outfile_type == 'yaml': + yaml_vars = yaml.dump(vx_config_dict, fn) + + return None +# +# ----------------------------------------------------------------------- +# +# Call the function defined above. +# +# ----------------------------------------------------------------------- +# +if __name__ == "__main__": + + parser = argparse.ArgumentParser( + description='Read in and process verification configuration file' + ) + + default_vx_type = 'det' + parser.add_argument('--vx_type', + type=str, + required=True, + choices=['det', 'ens'], + default=default_vx_type, + help=dedent(f""" + String that determines whether to read in the deterministic or ensemble + verification configuration file. + """)) + + default_outfile_type = 'txt' + parser.add_argument('--outfile_type', + type=str, + required=True, + choices=['txt', 'yaml'], + default=default_outfile_type, + help=dedent(f""" + Type of output file. The output consists of a high-level dictionary + containing two keys: 'fcst' and 'obs'. 
The value of 'fcst' is the vx
+        configuration dictionary for forecasts, and the value of 'obs' is the vx
+        dictionary for observations.  If outfile_type is set to 'txt', this high-
+        level dictionary is saved to a text file in a form that can be read in by
+        the SRW App's ex-scripts for the verification tasks.  In particular, this
+        form contains the curly braces and brackets that define dictionaries and
+        lists in python code (but that would normally not appear in a yaml file).
+        If outfile_type is set to 'yaml', then the high-level dictionary is saved
+        to a yaml-formatted file.
+        """))
+
+    parser.add_argument('--outdir',
+                        type=str,
+                        required=False,
+                        default='./',
+                        help=dedent(f"""
+        Directory in which to place the output file containing the decoupled
+        (i.e. with forecast and observation information placed in separate data
+        structures) verification configuration information.
+        """))
+
+    args = parser.parse_args()
+
+    decouple_fcst_obs_vx_config(vx_type=args.vx_type, outfile_type=args.outfile_type, outdir=args.outdir)
+
diff --git a/ush/set_vx_fhr_list.sh b/ush/set_vx_fhr_list.sh
index 8a1c9735a5..5cefc78365 100644
--- a/ush/set_vx_fhr_list.sh
+++ b/ush/set_vx_fhr_list.sh
@@ -253,7 +253,7 @@ METplus configuration file.
 #
 fhr_list=$( echo "${fhr_list}" | $SED "s/^,//g" )
 print_info_msg "$VERBOSE" "\
-Final (i.e. after filtering for missing files) set of foreast hours is
+Final (i.e. after filtering for missing files) set of forecast hours is
 (written as a single string):
   fhr_list = \"${fhr_list}\"
 "

From fc10bdbe140b74ffca8e4894ed4bae3321326f99 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 7 May 2024 09:22:12 -0400
Subject: [PATCH 23/42] [develop] Bump jinja2 from 3.1.3 to 3.1.4 in /doc (#1080)

Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.3 to 3.1.4.
- [Release notes](https://github.com/pallets/jinja/releases)
- [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst)
- [Commits](https://github.com/pallets/jinja/compare/3.1.3...3.1.4)

---
updated-dependencies:
- dependency-name: jinja2
  dependency-type: indirect
...

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 doc/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/requirements.txt b/doc/requirements.txt
index eadc94dcaf..a2f32cd83f 100644
--- a/doc/requirements.txt
+++ b/doc/requirements.txt
@@ -22,7 +22,7 @@ idna==3.7
     # via requests
 imagesize==1.4.1
     # via sphinx
-jinja2==3.1.3
+jinja2==3.1.4
     # via sphinx
 latexcodec==2.0.1
     # via pybtex

From a712ef15fc42c88012f2cd926712dedbd3a8f91e Mon Sep 17 00:00:00 2001
From: gsketefian <31046882+gsketefian@users.noreply.github.com>
Date: Mon, 13 May 2024 07:16:35 -0600
Subject: [PATCH 24/42] [develop] Simplify the way the configuration of the vx
 is handled (#1082)

The parse_vx_config_[det|ens] tasks and the decouple_fcst_obs_vx_config.py
script are removed (so that the intermediate configuration files are no
longer created).  The separation into forecast and observation values of
the "coupled" information in the vx configuration files is now performed
in the jinja2 templates for the METplus configuration files, hiding these
details from the user.
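
As a hypothetical sketch (not taken from the actual METplus templates) of
what performing the separation inside a jinja2 template can look like:

    # The template itself splits a coupled item instead of relying on a
    # separate preprocessing task; names here are made up for illustration.
    from jinja2 import Template

    tmpl = Template(
        "{%- set parts = item.split('%%') if '%%' in item else [item, item] %}"
        "FCST_VAR={{ parts[0] }} OBS_VAR={{ parts[1] }}"
    )
    print(tmpl.render(item="REFC%%MergedReflectivityQCComposite"))
    # FCST_VAR=REFC OBS_VAR=MergedReflectivityQCComposite
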
--- jobs/JREGIONAL_PARSE_VX_CONFIG | 97 ---- parm/metplus/EnsembleStat.conf | 215 ++++----- parm/metplus/GenEnsProd.conf | 142 ++++-- parm/metplus/GridStat_ensmean.conf | 236 +++++----- parm/metplus/GridStat_ensprob.conf | 208 ++++----- parm/metplus/GridStat_or_PointStat.conf | 399 +++++++--------- parm/metplus/PointStat_ensmean.conf | 228 +++++---- parm/metplus/PointStat_ensprob.conf | 198 ++++---- parm/metplus/metplus_macros.jinja | 122 +---- parm/metplus/vx_config_det.yaml | 204 ++++---- parm/wflow/verify_det.yaml | 24 - parm/wflow/verify_ens.yaml | 24 - scripts/exregional_parse_vx_config.sh | 94 ---- ...onal_run_met_genensprod_or_ensemblestat.sh | 77 ++-- ...gional_run_met_gridstat_or_pointstat_vx.sh | 77 ++-- ...un_met_gridstat_or_pointstat_vx_ensmean.sh | 77 ++-- ...un_met_gridstat_or_pointstat_vx_ensprob.sh | 77 ++-- ush/metplus/decouple_fcst_obs_vx_config.py | 436 ------------------ 18 files changed, 1061 insertions(+), 1874 deletions(-) delete mode 100755 jobs/JREGIONAL_PARSE_VX_CONFIG delete mode 100755 scripts/exregional_parse_vx_config.sh delete mode 100755 ush/metplus/decouple_fcst_obs_vx_config.py diff --git a/jobs/JREGIONAL_PARSE_VX_CONFIG b/jobs/JREGIONAL_PARSE_VX_CONFIG deleted file mode 100755 index c1cbba8e34..0000000000 --- a/jobs/JREGIONAL_PARSE_VX_CONFIG +++ /dev/null @@ -1,97 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# -# -#----------------------------------------------------------------------- -# - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -source_config_for_task "task_parse_vx_config" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the J-job script for the task that reads in the \"coupled\" yaml -verification (vx) configuration file (python dictionary) and generates -from it two \"decoupled\" vx configuration dictionaries, one for forecasts -and another for observations. The task then writes these two decoupled -dictionaries to a new configuration file in the experiment directory -that can be read by downstream vx tasks. 
- -Note: -The \"coupled\" vx configuration file contains items (dictionary keys and -values representing field names, levels, and thresholds) that consist of -both the forecast and the observation value for that item separated by a -delimiter string. Thus, they first need to be separated (decoupled) into -a value for forecasts and one for the observations before they can be -further processed. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Call the ex-script for this J-job and pass to it the necessary varia- -# bles. -# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exregional_parse_vx_config.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - diff --git a/parm/metplus/EnsembleStat.conf b/parm/metplus/EnsembleStat.conf index 1ca46b961e..2caeda1521 100644 --- a/parm/metplus/EnsembleStat.conf +++ b/parm/metplus/EnsembleStat.conf @@ -242,136 +242,123 @@ Import the file containing jinja macros. {#- Jinja requires certain variables to be defined globally within the template -before they can be used in if-statements and other scopes (see Jinja -scoping rules). Define such variables. +before they can be used in if-statements and other scopes (see Jinja scoping +rules). Define such variables. #} +{%- set indx_level_fcst = '' %} +{%- set indx_input_thresh_fcst = '' %} +{%- set error_msg = '' %} +{%- set opts_indent = '' %} +{%- set opts_indent_len = '' %} + +{%- set field_fcst = '' %} +{%- set field_obs = '' %} {%- set level_fcst = '' %} {%- set level_obs = '' %} -{%- set indx_level_fcst = '' %} +{%- set thresh_fcst = '' %} +{%- set thresh_obs = '' %} -{%- set valid_threshes_fcst = [] %} -{%- set valid_threshes_obs = [] %} {%- set threshes_fcst = [] %} {%- set threshes_obs = [] %} -{%- set indx_input_thresh_fcst = '' %} - -{%- set opts_indent = '' %} -{%- set opts_indent_len = '' %} -{%- set tmp = '' %} -{%- set error_msg = '' %} -{#- -Make sure that the set of field groups for forecasts and observations -are identical. -#} -{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %} -{%- set fgs_obs = vx_config_dict['obs'].keys()|list %} -{%- if (fgs_fcst != fgs_obs) %} - {%- set error_msg = '\n' ~ -'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~ -'to that for observations (fgs_obs) but isn\'t:\n' ~ -' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~ -' fgs_obs = ' ~ fgs_obs %} - {{metplus_macros.print_err_and_quit(error_msg)}} -{%- endif %} {#- -Extract the lists of forecast and observation dictionaries containing -the valid fields, levels, and thresholds corresponding to the specified -field group (input_field_group). 
Note that it would be simpler to have
-these be just dictionaries in which the keys are the field names (instead
-of them being LISTS of dictionaries in which each dictionary contains a
-single key that is the field name), but that approach cannot be used here
-because it is possible for field names to be repeated (for both forecasts
-and observations).  For example, in the observations, the field name
-'PRWE' appears more than once, each time with a different threshold, and
-the combination of name and threshold is what constitutes a unique field,
-not just the name by itself.
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
 #}
-{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
-{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}

 {#-
-Reset the specified forecast level so that if it happens to be an
-accumulation (e.g. 'A03'), the leading zeros in front of the hour are
-stipped out (e.g. reset to 'A3').
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
 #}
 {%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}

 {#-
-Ensure that the specified input forecast level(s) (input_level_fcst) and
-threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
-set(s) of valid forecast levels and thresholds, respectively, specified
-in fields_levels_threshes_fcst.
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group.  Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
 #}
-{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
-{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}

 {#-
-For convenience, create lists of valid forecast and observation field
-names.
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in the
+METplus configuration file.
#} -{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %} -{%- set valid_fields_fcst = [] %} -{%- for i in range(0,num_valid_fields_fcst) %} - {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %} - {%- set tmp = valid_fields_fcst.append(field) %} -{%- endfor %} +{%- set ns = namespace(var_count = 0) %} +{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %} -{%- set valid_fields_obs = [] %} -{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %} -{%- for i in range(0,num_valid_fields_obs) %} - {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %} - {%- set tmp = valid_fields_obs.append(field) %} -{%- endfor %} + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} {#- -Ensure that the number of valid fields for forecasts is equal to that -for the observations. +For convenience, create lists of valid forecast and observation levels +for the current field. #} -{%- set num_valid_fields = 0 %} -{%- if (num_valid_fields_fcst != num_valid_fields_obs) %} - {%- set error_msg = '\n' ~ -'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~ -'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~ -'but isn\'t:\n' ~ -' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~ -' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~ -'The lists of valid forecast and observation fields are:\n' ~ -' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~ -' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %} - {{metplus_macros.print_err_and_quit(error_msg)}} -{%- else %} - {%- set num_valid_fields = num_valid_fields_fcst %} -{%- endif %} + {%- set valid_levels_fcst = [] %} + {%- set valid_levels_obs = [] %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + {%- set tmp = valid_levels_fcst.append(level_fcst) %} + {%- set tmp = valid_levels_obs.append(level_obs) %} + {%- endfor %} {#- -Loop over the valid fields and set field names, levels, thresholds, and/ -or options for each field, both for forecasts and for obseratiions, in -the METplus configuration file. +Make sure that the input forecast level (input_level_fcst) is set to a +valid value. #} -{%- set ns = namespace(var_count = 0) %} -{%- for i in range(0,num_valid_fields) %} - - {%- set field_fcst = valid_fields_fcst[i] %} - {%- set field_obs = valid_fields_obs[i] %} + {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %} + {%- set error_msg = '\n' ~ +'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~ +'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~ +'for the current forecast field (field_fcst). This is not the case:\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} {#- -For convenience, create lists of valid forecast and observation levels -for the current field. Then check that the number of valid levels for -forecasts is the same as that for observations. 
+Loop over the (coupled) levels and corresponding lists of thresholds.
+Extract from these the level values for forecasts and observations and
+use them to set the forecast and observation field names, levels,
+thresholds, and/or options in the METplus configuration file.
 #}
-    {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
-    {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+  {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+
+    {%- if delim_str in level_cpld %}
+      {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+    {%- else %}
+      {%- set level_fcst = level_cpld %}
+      {%- set level_obs = level_cpld %}
+    {%- endif %}
+
+    {%- set valid_threshes_fcst = [] %}
+    {%- set valid_threshes_obs = [] %}
+    {%- for thresh_cpld in threshes_cpld %}
+      {%- if delim_str in thresh_cpld %}
+        {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %}
+      {%- else %}
+        {%- set thresh_fcst = thresh_cpld %}
+        {%- set thresh_obs = thresh_cpld %}
+      {%- endif %}
+      {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %}
+      {%- set tmp = valid_threshes_obs.append(thresh_obs) %}
+    {%- endfor %}

 {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
 {#-
@@ -415,17 +402,19 @@ to the full set of valid values.
       {%- set threshes_fcst = valid_threshes_fcst %}
 {#-
 If input_thresh_fcst is set to a specific value:
-  1) Ensure that input_thresh_fcst exists in the list of valid forecast
-     thresholds.
-  2) Get the index of input_thresh_fcst in the list of valid forecast
-     thresholds.  This will be needed later below when setting the
-     observation threshold(s).
-  3) Use this index to set the forecast threshold to a one-element list
-     containing the specified forecast threshold.
+* If that value is valid, i.e. it exists in the list of valid forecast
+  thresholds, get its index in that list and use it to set the forecast
+  threshold to a one-element list containing that value.  Note that the
+  index will be needed later below when setting the observation threshold(s).
+* If the input forecast threshold is not valid, print out an error message
+  and exit.
 #}
     {%- else %}
-      {%- if input_thresh_fcst not in valid_threshes_fcst %}
+      {%- if input_thresh_fcst in valid_threshes_fcst %}
+        {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
+        {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
+      {%- else %}
        {%- set error_msg = '\n' ~
 'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~
 'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~
@@ -436,8 +425,6 @@ If input_thresh_fcst is set to a specific value:
 '  input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %}
        {{metplus_macros.print_err_and_quit(error_msg)}}
      {%- endif %}
-      {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
-      {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
    {%- endif %}

 {#-
@@ -525,7 +512,7 @@ Set observation field name.
Note that this has to exactly match
 the name of the field in the input observation file.

 For accumulated fields, the input observation file is generated by MET's
-PcpCombine tool.  In that file, the field name consists of the observation
+PcpCombine tool. In that file, the field name consists of the observation
 field name here (field_obs) with the accumulation period appended to it
 (separated by an underscore), so we must do the same here to get an exact
 match.
@@ -557,11 +544,6 @@ set to 'none'.
 #}
   {%- if (input_thresh_fcst != 'none') %}
 {#-
-Set the list of valid observation thresholds to the one corresponding to
-the current observation level (level_obs).
-#}
-    {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %}
-{#-
 If input_thresh_fcst is set to 'all', set the list of observation
 thresholds to the full set of valid values.
 #}
@@ -653,6 +635,7 @@ OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE";
     {%- endif %}
   {%- endif %}
+
 {#-
 Print out a newline to separate the settings for the current field (both
 forecast and observation settings) from those for the next field.
diff --git a/parm/metplus/GenEnsProd.conf b/parm/metplus/GenEnsProd.conf
index 7291ce02fa..6c47cedb0d 100644
--- a/parm/metplus/GenEnsProd.conf
+++ b/parm/metplus/GenEnsProd.conf
@@ -124,68 +124,110 @@ Import the file containing jinja macros.

 {#-
 Jinja requires certain variables to be defined globally within the template
-before they can be used in if-statements and other scopes (see Jinja
-scoping rules).  Define such variables.
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
 #}
-{%- set threshes_fcst = [] %}
 {%- set indx_input_thresh_fcst = '' %}
-
+{%- set error_msg = '' %}
 {%- set opts_indent = '' %}
 {%- set opts_indent_len = '' %}
-{%- set tmp = '' %}
-{%- set error_msg = '' %}
+
+{%- set field_fcst = '' %}
+{%- set level_fcst = '' %}
+{%- set thresh_fcst = '' %}
+
+{%- set threshes_fcst = [] %}

 {#-
-Extract the list of forecast dictionaries containing the valid fields,
-levels, and thresholds corresponding to the specified field group
-(input_field_group).
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
 #}
-{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}

 {#-
-Reset the specified forecast level so that if it happens to be an
-accumulation (e.g. 'A03'), the leading zeros in front of the hour are
-stipped out (e.g. reset to 'A3').
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
 #}
 {%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}

 {#-
-Ensure that the specified input forecast level(s) (input_level_fcst) and
-threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
-set(s) of valid forecast levels and thresholds, respectively, specified
-in fields_levels_threshes_fcst.
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group.  Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
#}
-{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
-{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}

 {#-
-For convenience, create lists of valid forecast field names.
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each forecast field in the METplus configuration file.  Note
+that GenEnsProd only deals with forecasts; it does not consider observations.
 #}
-{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
-{%- set valid_fields_fcst = [] %}
-{%- for i in range(0,num_valid_fields_fcst) %}
-  {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
-  {%- set tmp = valid_fields_fcst.append(field) %}
-{%- endfor %}
+{%- set ns = namespace(var_count = 0) %}
+{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %}
+
+  {%- if delim_str in field_cpld %}
+    {%- set field_fcst = field_cpld.split(delim_str)[0] %}
+  {%- else %}
+    {%- set field_fcst = field_cpld %}
+  {%- endif %}

 {#-
-Loop over the valid fields and set field names, levels, thresholds, and/
-or options for each forecast field.  Note that GenEnsProd only deals with
-forecasts; it does not need observations.
+For convenience, create a list of valid forecast levels for the current
+field.
 #}
-{%- set ns = namespace(var_count = 0) %}
-{%- for i in range(0,num_valid_fields_fcst) %}
+  {%- set valid_levels_fcst = [] %}
+  {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+    {%- if delim_str in level_cpld %}
+      {%- set level_fcst = level_cpld.split(delim_str)[0] %}
+    {%- else %}
+      {%- set level_fcst = level_cpld %}
+    {%- endif %}
+    {%- set tmp = valid_levels_fcst.append(level_fcst) %}
+  {%- endfor %}

-  {%- set field_fcst = valid_fields_fcst[i] %}
+{#-
+Make sure that the input forecast level (input_level_fcst) is set to a
+valid value.
+#}
+  {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %}
+    {%- set error_msg = '\n' ~
+'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~
+'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~
+'for the current forecast field (field_fcst).  This is not the case:\n' ~
+'  field_fcst = ' ~ field_fcst ~ '\n' ~
+'  valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~
+'  input_level_fcst = ' ~ input_level_fcst ~ '\n' %}
+    {{metplus_macros.print_err_and_quit(error_msg)}}
+  {%- endif %}

 {#-
-Extract dictionary of valid forecast levels (the dictionary keys) and
-corresponding lists of valid thresholds (the values) for each level.
-Then loop over these levels and corresponding lists of thresholds to set
-the forecast field names, levels, thresholds, and/or options.
+Loop over the (coupled) levels and corresponding lists of thresholds.
+Extract from these the level values for forecasts and use them to set the
+forecast field names, levels, thresholds, and/or options in the METplus
+configuration file.
#}
-    {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %}
-    {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %}
+  {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+
+    {%- if delim_str in level_cpld %}
+      {%- set level_fcst = level_cpld.split(delim_str)[0] %}
+    {%- else %}
+      {%- set level_fcst = level_cpld %}
+    {%- endif %}
+
+    {%- set valid_threshes_fcst = [] %}
+    {%- for thresh_cpld in threshes_cpld %}
+      {%- if delim_str in thresh_cpld %}
+        {%- set thresh_fcst = thresh_cpld.split(delim_str)[0] %}
+      {%- else %}
+        {%- set thresh_fcst = thresh_cpld %}
+      {%- endif %}
+      {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %}
+    {%- endfor %}

 {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %}
 {#-
@@ -229,17 +271,18 @@ to the full set of valid values.
       {%- set threshes_fcst = valid_threshes_fcst %}
 {#-
 If input_thresh_fcst is set to a specific value:
-  1) Ensure that input_thresh_fcst exists in the list of valid forecast
-     thresholds.
-  2) Get the index of input_thresh_fcst in the list of valid forecast
-     thresholds.  This will be needed later below when setting the
-     observation threshold(s).
-  3) Use this index to set the forecast threshold to a one-element list
-     containing the specified forecast threshold.
+* If that value is valid, i.e. it exists in the list of valid forecast
+  thresholds, get its index in that list and use it to set the forecast
+  threshold to a one-element list containing that value.
+* If the input forecast threshold is not valid, print out an error message
+  and exit.
 #}
     {%- else %}
-      {%- if input_thresh_fcst not in valid_threshes_fcst %}
+      {%- if input_thresh_fcst in valid_threshes_fcst %}
+        {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
+        {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
+      {%- else %}
        {%- set error_msg = '\n' ~
 'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~
 'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~
@@ -250,8 +293,6 @@ If input_thresh_fcst is set to a specific value:
 '  input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %}
        {{metplus_macros.print_err_and_quit(error_msg)}}
      {%- endif %}
-      {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
-      {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
    {%- endif %}

 {#-
@@ -310,9 +351,10 @@ ENS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
   {%- endif %}
 {%- endif %}
+
 {#-
-Print out a newline to separate the settings for the current field (both
-forecast and observation settings) from those for the next field.
+Print out a newline to separate the settings for the current field from
+those for the next field.
 #}
 {{- '\n' }}

diff --git a/parm/metplus/GridStat_ensmean.conf b/parm/metplus/GridStat_ensmean.conf
index 4b8c71ddab..6bbc20e3f8 100644
--- a/parm/metplus/GridStat_ensmean.conf
+++ b/parm/metplus/GridStat_ensmean.conf
@@ -119,70 +119,49 @@ script instead of a hard-coded value as below.

 {#-
 Jinja requires certain variables to be defined globally within the template
-before they can be used in if-statements and other scopes (see Jinja
-scoping rules).  Define such variables.
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
#}
-{%- set level_fcst = '' %}
-{%- set level_obs = '' %}
 {%- set indx_level_fcst = '' %}
-
-{%- set valid_threshes_fcst = [] %}
-{%- set valid_threshes_obs = [] %}
-{%- set threshes_fcst = '' %}
-{%- set threshes_obs = '' %}
 {%- set indx_input_thresh_fcst = '' %}
-
+{%- set error_msg = '' %}
 {%- set opts_indent = '' %}
 {%- set opts_indent_len = '' %}
 {%- set tmp = '' %}
-{%- set error_msg = '' %}

-{#-
-Make sure that the set of field groups for forecasts and observations
-are identical.
-#}
-{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
-{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
-{%- if (fgs_fcst != fgs_obs) %}
-  {%- set error_msg = '\n' ~
-'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
-'to that for observations (fgs_obs) but isn\'t:\n' ~
-'  fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
-'  fgs_obs = ' ~ fgs_obs %}
-  {{metplus_macros.print_err_and_quit(error_msg)}}
-{%- endif %}
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+
+{%- set threshes_fcst = [] %}
+{%- set threshes_obs = [] %}

 {#-
-Extract the lists of forecast and observation dictionaries containing
-the valid fields, levels, and thresholds corresponding to the specified
-field group (input_field_group).  Note that it would be simpler to have
-these be just dictionaries in which the keys are the field names (instead
-of them being LISTS of dictionaries in which each dictionary contains a
-single key that is the field name), but that approach cannot be used here
-because it is possible for field names to be repeated (for both forecasts
-and observations).  For example, in the observations, the field name
-'PRWE' appears more than once, each time with a different threshold, and
-the combination of name and threshold is what constitutes a unique field,
-not just the name by itself.
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
 #}
-{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
-{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}

 {#-
-Reset the specified forecast level so that if it happens to be an
-accumulation (e.g. 'A03'), the leading zeros in front of the hour are
-stipped out (e.g. reset to 'A3').
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
 #}
 {%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}

 {#-
-Ensure that the specified input forecast level(s) (input_level_fcst) and
-threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
-set(s) of valid forecast levels and thresholds, respectively, specified
-in fields_levels_threshes_fcst.
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group.  Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
#} -{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }} -{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }} +{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %} +{%- set delim_str = metplus_macros.set_delim_str() %} {#- Some fields in the specified field group (input_field_group) may need to @@ -200,72 +179,98 @@ following dictionary. {%- set fields_fcst_to_exclude = fields_fcst_to_exclude_by_field_group[input_field_group] %} {#- -For convenience, create lists of valid forecast and observation field -names. +Remove from the dictionary fields_levels_threshes_cpld any fields that +are in the list to be excluded. #} -{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %} -{%- set valid_fields_fcst = [] %} -{%- for i in range(0,num_valid_fields_fcst) %} - {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %} - {%- set tmp = valid_fields_fcst.append(field) %} -{%- endfor %} +{%- for field_cpld in fields_levels_threshes_cpld.copy() %} -{%- set valid_fields_obs = [] %} -{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %} -{%- for i in range(0,num_valid_fields_obs) %} - {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %} - {%- set tmp = valid_fields_obs.append(field) %} -{%- endfor %} + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} -{#- -Ensure that the number of valid fields for forecasts is equal to that -for the observations. -#} -{%- set num_valid_fields = 0 %} -{%- if (num_valid_fields_fcst != num_valid_fields_obs) %} - {%- set error_msg = '\n' ~ -'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~ -'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~ -'but isn\'t:\n' ~ -' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~ -' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~ -'The lists of valid forecast and observation fields are:\n' ~ -' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~ -' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %} - {{metplus_macros.print_err_and_quit(error_msg)}} -{%- else %} - {%- set num_valid_fields = num_valid_fields_fcst %} -{%- endif %} + {%- if field_fcst in fields_fcst_to_exclude %} + {%- set tmp = fields_levels_threshes_cpld.pop(field_cpld) %} + {%- endif %} + +{%- endfor %} {#- -Loop over the valid fields and set field names, levels, thresholds, and/ -or options for each field, both for forecasts and for obseratiions, in -the METplus configuration file. +Loop over the fields and set field names, levels, thresholds, and/or +options for each field, both for forecasts and for observations, in the +METplus configuration file. #} {%- set ns = namespace(var_count = 0) %} +{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %} -{%- for i in range(0,num_valid_fields) if valid_fields_fcst[i] not in fields_fcst_to_exclude %} - - {%- set field_fcst = valid_fields_fcst[i] %} - {%- set field_obs = valid_fields_obs[i] %} + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} {#- For convenience, create lists of valid forecast and observation levels -for the current field. 
Then check that the number of valid levels for -forecasts is the same as that for observations. +for the current field. +#} + {%- set valid_levels_fcst = [] %} + {%- set valid_levels_obs = [] %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + {%- set tmp = valid_levels_fcst.append(level_fcst) %} + {%- set tmp = valid_levels_obs.append(level_obs) %} + {%- endfor %} + +{#- +Make sure that the input forecast level (input_level_fcst) is set to a +valid value. #} - {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %} - {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %} + {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %} + {%- set error_msg = '\n' ~ +'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~ +'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~ +'for the current forecast field (field_fcst). This is not the case:\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} {#- -Extract dictionary of valid forecast levels (the dictionary keys) and -corresponding lists of valid thresholds (the values) for each level. -Then loop over these levels and corresponding lists of thresholds to set -both the forecast and observation field names, levels, thresholds, and/or -options. +Loop over the (coupled) levels and corresponding lists of thresholds. +Extract from these the level values for forecasts and observations and +use them to set the forecast and observation field names, levels, +thresholds, and/or options in the METplus configuration file. #} - {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %} - {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + + {%- set valid_threshes_fcst = [] %} + {%- set valid_threshes_obs = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- set thresh_obs = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- set tmp = valid_threshes_obs.append(thresh_obs) %} + {%- endfor %} {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} {#- @@ -284,20 +289,20 @@ For example, if the same field, say APCP, is output at two different levels, say at A3 and A6 (for APCP, "levels" are really accumulation periods), there need to be two variables in the output file, and they obviously can't both be named "APCP", so GenEnsProd names one "APCP_A3" -and the other "APCP_A6". Here, the level is stored in the variable +and the other "APCP_A6". Here, the level is stored in the variable level_fcst and, below, is included in the name of the forecast field. 
For accumulated fields, the field name in the input forecast file contains TWO references to the accumulation period. The first is the level of the forecast field added by GenEnsProd as described above. The second is -another reference to this same level (accumulation period) but added by +another reference to this same level (accumulation period) but added by the MET/METplus's PcpCombine tool (whose output file is the input into GenEnsProd). PcpCombine adds this reference to the level (really the accumulation period) to the field's name for the same reason that GenEnsProd does, i.e. to ensure that the names of variables in the output -file are distinct. Here, this accumulation period is stored in the +file are distinct. Here, this accumulation period is stored in the variable accum_hh. Thus, for accumulated fields, below we add both -accum_hh and level_fcst to the field name to get an exact field name +accum_hh and level_fcst to the field name to get an exact field name match. #} {%- if (input_field_group in ['APCP', 'ASNOW']) %} @@ -326,17 +331,19 @@ to the full set of valid values. {%- set threshes_fcst = valid_threshes_fcst %} {#- If input_thresh_fcst is set to a specific value: - 1) Ensure that input_thresh_fcst exists in the list of valid forecast - thresholds. - 2) Get the index of input_thresh_fcst in the list of valid forecast - thresholds. This will be needed later below when setting the - observation threshold(s). - 3) Use this index to set the forecast threshold to a one-element list - containing the specified forecast threshold. +* If that value is valid, i.e. it exists in the list of valid forecast + thresholds, get its index in that list and use it to set the forecast + threshold to a one-element list containing that value. Note that the + index will be needed later below when setting the observation threshold(s). +* If the input forecast threshold is not valid, print out a warning message + and exit. #} {%- else %} - {%- if input_thresh_fcst not in valid_threshes_fcst %} + {%- if input_thresh_fcst in valid_threshes_fcst %} + {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} + {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} + {%- else %} {%- set error_msg = '\n' ~ 'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~ 'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~ @@ -347,8 +354,6 @@ If input_thresh_fcst is set to a specific value: ' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %} {{metplus_macros.print_err_and_quit(error_msg)}} {%- endif %} - {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} - {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} {%- endif %} {#- @@ -391,7 +396,7 @@ Set observation field name. Note that this has to exactly match the name of the field in the input observation file. For accumulated fields, the input observation file is generated by MET's -PcpCombine tool. In that file, the field name consists of the observation +PcpCombine tool. In that file, the field name consists of the observation field name here (field_obs) with the accumulation period appended to it (separated by an underscore), so we must do the same here to get an exact match. @@ -423,11 +428,6 @@ set to 'none'. #} {%- if (input_thresh_fcst != 'none') %} {#- -Set the list of valid observation thresholds to the one corresponding to -the current observation level (level_obs). 
-#}
-    {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %}
-{#-
 If input_thresh_fcst is set to 'all', set the list of observation
 thresholds to the full set of valid values.
 #}
diff --git a/parm/metplus/GridStat_ensprob.conf b/parm/metplus/GridStat_ensprob.conf
index 6a4873e446..a43b8ed340 100644
--- a/parm/metplus/GridStat_ensprob.conf
+++ b/parm/metplus/GridStat_ensprob.conf
@@ -133,121 +133,96 @@ Jinja requires certain variables to be defined globally within the template
 before they can be used in if-statements and other scopes (see Jinja scoping
 rules). Define such variables.
 #}
-{%- set level_fcst = '' %}
-{%- set level_obs = '' %}
 {%- set indx_level_fcst = '' %}
+{%- set indx_thresh_fcst = '' %}
+{%- set error_msg = '' %}
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}

-{%- set valid_threshes_fcst = [] %}
-{%- set valid_threshes_obs = [] %}
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
 {%- set thresh_fcst = '' %}
 {%- set thresh_obs = '' %}
-{%- set indx_thresh_fcst = '' %}
 {%- set thresh_fcst_and_or = '' %}

-{%- set opts_indent = '' %}
-{%- set opts_indent_len = '' %}
-{%- set tmp = '' %}
-{%- set error_msg = '' %}
-
 {#-
-Make sure that the set of field groups for forecasts and observations
-are identical.
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
 #}
-{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %}
-{%- set fgs_obs = vx_config_dict['obs'].keys()|list %}
-{%- if (fgs_fcst != fgs_obs) %}
-  {%- set error_msg = '\n' ~
-'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~
-'to that for observations (fgs_obs) but isn\'t:\n' ~
-'  fgs_fcst = ' ~ fgs_fcst ~ '\n' ~
-'  fgs_obs = ' ~ fgs_obs %}
-  {{metplus_macros.print_err_and_quit(error_msg)}}
-{%- endif %}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}

 {#-
-Extract the lists of forecast and observation dictionaries containing
-the valid fields, levels, and thresholds corresponding to the specified
-field group (input_field_group).  Note that it would be simpler to have
-these be just dictionaries in which the keys are the field names (instead
-of them being LISTS of dictionaries in which each dictionary contains a
-single key that is the field name), but that approach cannot be used here
-because it is possible for field names to be repeated (for both forecasts
-and observations).  For example, in the observations, the field name
-'PRWE' appears more than once, each time with a different threshold, and
-the combination of name and threshold is what constitutes a unique field,
-not just the name by itself.
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
 #}
-{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
-{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}

 {#-
-Reset the specified forecast level so that if it happens to be an
-accumulation (e.g. 'A03'), the leading zeros in front of the hour are
-stipped out (e.g. reset to 'A3').
+Extract from the configuration dictionary the set (which itself is a +dictionary) of fields, levels, and thresholds corresponding to the input +field group. Then set the delimiter string that separates forecast and +observation values in the various items (i.e. dictionary keys and values +representing field names, levels, and thresholds) in this dictionary. #} -{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %} +{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %} +{%- set delim_str = metplus_macros.set_delim_str() %} {#- -Ensure that the specified input forecast level(s) (input_level_fcst) and -threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the -set(s) of valid forecast levels and thresholds, respectively, specified -in fields_levels_threshes_fcst. +Loop over the fields and set field names, levels, thresholds, and/or +options for each field, both for forecasts and for observations, in the +METplus configuration file. #} -{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }} -{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }} - +{%- set ns = namespace(var_count = 0) %} {#- -For convenience, create lists of valid forecast and observation field -names. +Loop over each field twice, the first treating the forecast field as +probabilistic and the second time as a scalar. #} -{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %} -{%- set valid_fields_fcst = [] %} -{%- for i in range(0,num_valid_fields_fcst) %} - {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %} - {%- set tmp = valid_fields_fcst.append(field) %} -{%- endfor %} +{%- for treat_fcst_as_prob in [True, False] %} -{%- set valid_fields_obs = [] %} -{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %} -{%- for i in range(0,num_valid_fields_obs) %} - {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %} - {%- set tmp = valid_fields_obs.append(field) %} -{%- endfor %} + {%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %} -{#- -Ensure that the number of valid fields for forecasts is equal to that -for the observations. -#} -{%- set num_valid_fields = 0 %} -{%- if (num_valid_fields_fcst != num_valid_fields_obs) %} - {%- set error_msg = '\n' ~ -'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~ -'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~ -'but isn\'t:\n' ~ -' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~ -' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~ -'The lists of valid forecast and observation fields are:\n' ~ -' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~ -' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %} - {{metplus_macros.print_err_and_quit(error_msg)}} -{%- else %} - {%- set num_valid_fields = num_valid_fields_fcst %} -{%- endif %} + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} {#- -Loop over the valid fields and set field names, levels, thresholds, and/ -or options for each field, both for forecasts and for obseratiions, in -the METplus configuration file. +For convenience, create lists of valid forecast and observation levels +for the current field. 
#} -{%- set ns = namespace(var_count = 0) %} + {%- set valid_levels_fcst = [] %} + {%- set valid_levels_obs = [] %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + {%- set tmp = valid_levels_fcst.append(level_fcst) %} + {%- set tmp = valid_levels_obs.append(level_obs) %} + {%- endfor %} {#- -Loop over each field twice, the first treating the forecast field as -probabilistic and the second time as a scalar. +Make sure that the input forecast level (input_level_fcst) is set to a +valid value. #} -{%- for treat_fcst_as_prob in [True, False] %} - - {%- for i in range(0,num_valid_fields) %} + {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %} + {%- set error_msg = '\n' ~ +'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~ +'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~ +'for the current forecast field (field_fcst). This is not the case:\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} {#- Add comment depending on whether or not the field is being treated @@ -265,26 +240,33 @@ probabilistically. # {%- endif %} - {%- set field_fcst = valid_fields_fcst[i] %} - {%- set field_obs = valid_fields_obs[i] %} - {#- -For convenience, create lists of valid forecast and observation levels -for the current field. Then check that the number of valid levels for -forecasts is the same as that for observations. +Loop over the (coupled) levels and corresponding lists of thresholds. +Extract from these the level values for forecasts and observations and +use them to set the forecast and observation field names, levels, +thresholds, and/or options in the METplus configuration file. #} - {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %} - {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} -{#- -Extract dictionary of valid forecast levels (the dictionary keys) and -corresponding lists of valid thresholds (the values) for each level. -Then loop over these levels and corresponding lists of thresholds to set -both the forecast and observation field names, levels, thresholds, and/or -options. 
-#} - {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %} - {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %} + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + + {%- set valid_threshes_fcst = [] %} + {%- set valid_threshes_obs = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- set thresh_obs = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- set tmp = valid_threshes_obs.append(thresh_obs) %} + {%- endfor %} {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} @@ -307,20 +289,20 @@ For example, if the same field, say APCP, is output at two different levels, say at A3 and A6 (for APCP, "levels" are really accumulation periods), there need to be two variables in the output file, and they obviously can't both be named "APCP", so GenEnsProd names one "APCP_A3" -and the other "APCP_A6". Here, the level is stored in the variable +and the other "APCP_A6". Here, the level is stored in the variable level_fcst and, below, is included in the name of the forecast field. For accumulated fields, the field name in the input forecast file contains TWO references to the accumulation period. The first is the level of the forecast field added by GenEnsProd as described above. The second is -another reference to this same level (accumulation period) but added by +another reference to this same level (accumulation period) but added by the MET/METplus's PcpCombine tool (whose output file is the input into GenEnsProd). PcpCombine adds this reference to the level (really the accumulation period) to the field's name for the same reason that GenEnsProd does, i.e. to ensure that the names of variables in the output -file are distinct. Here, this accumulation period is stored in the +file are distinct. Here, this accumulation period is stored in the variable accum_hh. Thus, for accumulated fields, below we add both -accum_hh and level_fcst to the field name to get an exact field name +accum_hh and level_fcst to the field name to get an exact field name match. #} {%- set thresh_fcst_and_or = thresh_fcst|replace("&&", ".and.") %} @@ -368,7 +350,7 @@ Set observation field name. Note that this has to exactly match the name of the field in the input observation file. For accumulated fields, the input observation file is generated by MET's -PcpCombine tool. In that file, the field name consists of the observation +PcpCombine tool. In that file, the field name consists of the observation field name here (field_obs) with the accumulation period appended to it (separated by an underscore), so we must do the same here to get an exact match. @@ -400,11 +382,6 @@ set to 'none'. #} {%- if (input_thresh_fcst != 'none') %} {#- -Set the list of valid observation thresholds to the one corresponding to -the current observation level (level_obs). -#} - {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %} -{#- Set the observation threshold. 
This is given by the element in the list of valid observation thresholds that has the same index as that of the current forcast threshold (thresh_fcst) in the list of valid forecast @@ -466,6 +443,7 @@ OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20.0; {%- endif %} {%- endif %} + {#- Print out a newline to separate the settings for the current field (both forecast and observation settings) from those for the next field. diff --git a/parm/metplus/GridStat_or_PointStat.conf b/parm/metplus/GridStat_or_PointStat.conf index c90783862b..39d34eb24f 100644 --- a/parm/metplus/GridStat_or_PointStat.conf +++ b/parm/metplus/GridStat_or_PointStat.conf @@ -189,7 +189,7 @@ OBTYPE = {{obtype}} {%- if input_field_group in ['APCP', 'ASNOW'] %} # Note that for accumulated fields such as APCP and ASNOW, in the input # forecast and observation files (which are generated by MET's PcpCombine -# tool) the accumulation period is appended to the field name, so the +# tool) the accumulation period is appended to the field name, so the # same is done here. # {%- endif %} @@ -216,154 +216,99 @@ Import the file containing jinja macros. {#- Jinja requires certain variables to be defined globally within the template -before they can be used in if-statements and other scopes (see Jinja -scoping rules). Define such variables. +before they can be used in if-statements and other scopes (see Jinja scoping +rules). Define such variables. #} -{%- set levels_fcst = '' %} -{%- set levels_obs = '' %} {%- set indx_input_level_fcst = '' %} +{%- set indx_input_thresh_fcst = '' %} +{%- set error_msg = '' %} +{%- set field_fcst = '' %} +{%- set field_obs = '' %} +{%- set level_fcst = '' %} +{%- set level_obs = '' %} +{%- set thresh_fcst = '' %} +{%- set thresh_obs = '' %} + +{%- set levels_fcst = '' %} +{%- set levels_obs = '' %} +{%- set threshes_cpld = [] %} {%- set valid_threshes_fcst = [] %} {%- set valid_threshes_obs = [] %} {%- set threshes_fcst = [] %} {%- set threshes_obs = [] %} -{%- set indx_input_thresh_fcst = '' %} - -{%- set opts_indent = '' %} -{%- set opts_indent_len = '' %} -{%- set tmp = '' %} -{%- set error_msg = '' %} -{#- -Make sure that the set of field groups for forecasts and observations -are identical. -#} -{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %} -{%- set fgs_obs = vx_config_dict['obs'].keys()|list %} -{%- if (fgs_fcst != fgs_obs) %} - {%- set error_msg = '\n' ~ -'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~ -'to that for observations (fgs_obs) but isn\'t:\n' ~ -' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~ -' fgs_obs = ' ~ fgs_obs %} - {{metplus_macros.print_err_and_quit(error_msg)}} -{%- endif %} {#- -Extract the lists of forecast and observation dictionaries containing -the valid fields, levels, and thresholds corresponding to the specified -field group (input_field_group). Note that it would be simpler to have -these be just dictionaries in which the keys are the field names (instead -of them being LISTS of dictionaries in which each dictionary contains a -single key that is the field name), but that approach cannot be used here -because it is possible for field names to be repeated (for both forecasts -and observations). For example, in the observations, the field name -'PRWE' appears more than once, each time with a different threshold, and -the combination of name and threshold is what constitutes a unique field, -not just the name by itself. +Get the set of valid field groups and ensure that the specified input +field group appears in this list. 
#}
-{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %}
-{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}

 {#-
-Reset the specified forecast level so that if it happens to be an
-accumulation (e.g. 'A03'), the leading zeros in front of the hour are
-stipped out (e.g. reset to 'A3').
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
 #}
 {%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}

 {#-
-Ensure that the specified input forecast level(s) (input_level_fcst) and
-threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the
-set(s) of valid forecast levels and thresholds, respectively, specified
-in fields_levels_threshes_fcst.
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group.  Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
 #}
-{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }}
-{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}

 {#-
-For convenience, create lists of valid forecast and observation field
-names.
-#}
-{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %}
-{%- set valid_fields_fcst = [] %}
-{%- for i in range(0,num_valid_fields_fcst) %}
-  {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %}
-  {%- set tmp = valid_fields_fcst.append(field) %}
-{%- endfor %}
-
-{%- set valid_fields_obs = [] %}
-{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %}
-{%- for i in range(0,num_valid_fields_obs) %}
-  {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %}
-  {%- set tmp = valid_fields_obs.append(field) %}
-{%- endfor %}
-
-{#-
-Ensure that the number of valid fields for forecasts is equal to that
-for the observations.
-#}
-{%- set num_valid_fields = 0 %}
-{%- if (num_valid_fields_fcst != num_valid_fields_obs) %}
-  {%- set error_msg = '\n' ~
-'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~
-'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~
-'but isn\'t:\n' ~
-'  num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~
-'  num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~
-'The lists of valid forecast and observation fields are:\n' ~
-'  valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~
-'  valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %}
-  {{metplus_macros.print_err_and_quit(error_msg)}}
-{%- else %}
-  {%- set num_valid_fields = num_valid_fields_fcst %}
-{%- endif %}
-
-{#-
-Loop over the valid fields and set field names, levels, thresholds, and/
-or options for each field, both for forecasts and for obseratiions, in
-the METplus configuration file.
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in the
+METplus configuration file.
#} {%- set ns = namespace(var_count = 0) %} -{%- for i in range(0,num_valid_fields) %} +{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %} - {%- set field_fcst = valid_fields_fcst[i] %} - {%- set field_obs = valid_fields_obs[i] %} + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} + + {%- set levels_cpld = levels_threshes_cpld.keys()|list %} + {%- set num_levels = levels_cpld|length %} {#- For convenience, create lists of valid forecast and observation levels -for the current field. Then check that the number of valid levels for -forecasts is the same as that for observations. +for the current field. #} - {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %} - {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %} - - {%- set num_valid_levels = 0 %} - {%- set num_valid_levels_fcst = valid_levels_fcst|length %} - {%- set num_valid_levels_obs = valid_levels_obs|length %} - {%- if (num_valid_levels_fcst != num_valid_levels_obs) %} - {%- set error_msg = '\n' ~ -'The number of valid forecast levels (num_valid_levels_fcst) must be\n' ~ -'equal to the number of valid observation levels (num_valid_levels_obs)\n' ~ -'but isn\'t:\n' ~ -' num_valid_levels_fcst = ' ~ num_valid_levels_fcst ~ '\n' ~ -' num_valid_levels_obs = ' ~ num_valid_levels_obs ~ '\n' %} - {{metplus_macros.print_err_and_quit(error_msg)}} - {%- else %} - {%- set num_valid_levels = num_valid_levels_fcst %} - {%- endif %} + {%- set valid_levels_fcst = [] %} + {%- set valid_levels_obs = [] %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + {%- set tmp = valid_levels_fcst.append(level_fcst) %} + {%- set tmp = valid_levels_obs.append(level_obs) %} + {%- endfor %} {#- -Make sure that input_level_fcst is set to a valid value. +Make sure that the input forecast level (input_level_fcst) is set to a +valid value. #} {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %} {%- set error_msg = '\n' ~ -'The input forecast level (input_level_fcst) must either be set to \'all\',\n' ~ -'or it must be set to one of the elements in the list of valid levels\n' ~ -'(valid_levels_fcst) for the current forecast field (field_fcst). This\n' ~ -'is not the case:\n' ~ +'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~ +'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~ +'for the current forecast field (field_fcst). This is not the case:\n' ~ ' field_fcst = ' ~ field_fcst ~ '\n' ~ -' input_level_fcst = ' ~ input_level_fcst ~ '\n' ~ -' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' %} +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' %} {{metplus_macros.print_err_and_quit(error_msg)}} {%- endif %} @@ -373,38 +318,43 @@ Increment the METplus variable counter. {%- set ns.var_count = ns.var_count+1 %} {#- -Set forecast field name. Note that this has to exactly match the name -of the field in the input forecast file. - -For accumulated fields, the input forecast file is generated by MET's -PcpCombine tool. 
In that file, the field name consists of the forecast -field name here (field_fcst) with the accumulation period appended to -it (separated by an underscore), so we must do the same here to get an -exact match. -#} - {%- if (input_field_group in ['APCP', 'ASNOW']) %} -FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}} - {%- else %} -FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}} - {%- endif %} - -{#- -Set forecast field level(s). +Set jinja parameters needed in setting the forecast and observation field +level(s). #} {%- if (input_level_fcst == 'all') %} {%- set levels_fcst = valid_levels_fcst %} + {%- set levels_obs = valid_levels_obs %} +{#- +If input_level_fcst is set to 'all' and there is more than one level to +be verified for the current field, then the list of forecast thresholds +for each forecast level must be identical to every other. Check for this. +Note that this restriction includes the order of the thresholds, i.e. the +set of thresholds for each level must be in the same order as for all +other levels. Once this is verified, we can set the index of the level +to use when obtaining thresholds to that of the first (index 0), which +will be valid both for the case of num_levels = 1 and num_levels > 1. +#} + {%- if (num_levels > 1) %} + {{- metplus_macros.check_for_identical_threshes_by_level( + field_cpld, levels_threshes_cpld) }} + {%- endif %} + {%- set indx_input_level_fcst = 0 %} {#- If input_level_fcst is set to a specific value: 1) Ensure that input_level_fcst exists in the list of valid forecast levels. 2) Get the index of input_level_fcst in the list of valid forecast - levels. This will be needed later below when setting the observation - level(s). - 3) Use this index to set the forecast level to a one-element list - containing the specified forecast level. + levels. + 3) Use this index to set the forecast and observation levels to one- + element lists containing the appropriate level values. #} {%- else %} - {%- if input_level_fcst not in valid_levels_fcst %} + + {%- if input_level_fcst in valid_levels_fcst %} + {%- set indx_input_level_fcst = valid_levels_fcst.index(input_level_fcst) %} + {%- set levels_fcst = [valid_levels_fcst[indx_input_level_fcst]] %} + {%- set levels_obs = [valid_levels_obs[indx_input_level_fcst]] %} + {%- else %} {%- set error_msg = '\n' ~ 'For the current forecast field (field_fcst), the input forecast level\n' ~ '(input_level_fcst) does not exist in the list of valid forecast levels\n' ~ @@ -414,31 +364,15 @@ If input_level_fcst is set to a specific value: ' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' %} {{metplus_macros.print_err_and_quit(error_msg)}} {%- endif %} - {%- set indx_input_level_fcst = valid_levels_fcst.index(input_level_fcst) %} - {%- set levels_fcst = [valid_levels_fcst[indx_input_level_fcst]] %} + {%- endif %} -FCST_VAR{{ns.var_count}}_LEVELS = {{levels_fcst|join(', ')}} {#- -Set forecast field threshold(s). Note that no forecast thresholds are -included in the METplus configuration file if input_thresh_fcst is set -to 'none'. +Set jinja parameters needed in setting the forecast and observation field +threshold(s). #} {%- if (input_thresh_fcst != 'none') %} {#- -If input_level_fcst is set to 'all' and there is more than one (forecast -or observation) level to be verified for the current (forecast or -observation) field, then the list of forecast thresholds for each forecast -level must be identical to every other. Check for this. Note that this -restriction includes the order of the thresholds, i.e. 
the set of -thresholds for each level must be in the same order as for all other -levels. -#} - {%- if (input_level_fcst == 'all') and (num_valid_levels > 1) %} - {{- metplus_macros.check_for_identical_threshes_by_level( - field_fcst, fields_levels_threshes_fcst[i]) }} - {%- endif %} -{#- Now set the list of valid forecast thresholds to the one corresponding to the first (zeroth) forecast level in the list of forecast levels set above. We can do this because, for the case of a single forecast level, @@ -446,29 +380,45 @@ there is only one list of forecast thresholds to consider (the first one), and for the case of all levels, all levels have the same set of thresholds (as verified by the check above). #} - {%- set valid_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst][levels_fcst[0]] %} + {%- set threshes_cpld = levels_threshes_cpld[levels_cpld[indx_input_level_fcst]] %} + {%- set valid_threshes_fcst = [] %} + {%- set valid_threshes_obs = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- set thresh_obs = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- set tmp = valid_threshes_obs.append(thresh_obs) %} + {%- endfor %} {#- -If input_thresh_fcst is set to 'all', set the list of forecast thresholds -to the full set of valid values. +If input_thresh_fcst is set to 'all', set the list of forecast and +observation thresholds to the full set of valid values. #} {%- if (input_thresh_fcst == 'all') %} {%- set threshes_fcst = valid_threshes_fcst %} + {%- set threshes_obs = valid_threshes_obs %} {#- If input_thresh_fcst is set to a specific value: 1) Ensure that input_thresh_fcst exists in the list of valid forecast thresholds. 2) Get the index of input_thresh_fcst in the list of valid forecast - thresholds. This will be needed later below when setting the - observation threshold(s). - 3) Use this index to set the forecast threshold to a one-element list - containing the specified forecast threshold. + thresholds. + 3) Use this index to set the forecast and observation threshold to one- + element lists containing the appropriate threshold values. #} {%- else %} - {%- if input_thresh_fcst not in valid_threshes_fcst %} + {%- if input_thresh_fcst in valid_threshes_fcst %} + {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} + {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} + {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %} + {%- else %} {%- set error_msg = '\n' ~ -'For the current forecast field (field_fcst) and list of forecast level(s)\n' ~ +'For the current forecast field (field_fcst) and list of forecast levels\n' ~ '(levels_fcst), the input forecast threshold (input_thresh_fcst) does not\n' ~ 'exist in the list of valid forecast thresholds (valid_threshes_fcst):\n' ~ ' field_fcst = ' ~ field_fcst ~ '\n' ~ @@ -477,22 +427,47 @@ If input_thresh_fcst is set to a specific value: ' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %} {{metplus_macros.print_err_and_quit(error_msg)}} {%- endif %} - {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} - {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} {%- endif %} + + {%- endif %} + +{#- +Set forecast field name. Note that this has to exactly match the name +of the field in the input forecast file. 
+ +For accumulated fields, the input forecast file is generated by MET's +PcpCombine tool. In that file, the field name consists of the forecast +field name here (field_fcst) with the accumulation period appended to +it (separated by an underscore), so we must do the same here to get an +exact match. +#} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}} + {%- else %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}} + {%- endif %} + +{#- +Set forecast field level(s). +#} +FCST_VAR{{ns.var_count}}_LEVELS = {{levels_fcst|join(', ')}} + {#- -If threshes_fcst has been reset to something other than its default -value of an empty list, then set the forecast thresholds in the METplus -configuration file because that implies threshes_fcst was set above to -a non-empty value. Then reset threshes_fcst to its default value for -proper processing of thresholds for the next field. +Set forecast field threshold(s). Note that: +1) No forecast thresholds are included in the METplus configuration file + if input_thresh_fcst is set to 'none'. +2) If threshes_fcst has been reset to something other than its default value + value of an empty list, then set the forecast thresholds in the METplus + configuration file because that implies threshes_fcst was set above to + a non-empty value. Then reset threshes_fcst to its default value for + proper processing of thresholds for the next field. #} + {%- if (input_thresh_fcst != 'none') %} {%- if (threshes_fcst != []) %} FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}} {%- endif %} {%- set threshes_fcst = [] %} - {%- endif %} {#- @@ -560,7 +535,7 @@ Set observation field name. Note that this has to exactly match the name of the field in the input observation file. For accumulated fields, the input observation file is generated by MET's -PcpCombine tool. In that file, the field name consists of the observation +PcpCombine tool. In that file, the field name consists of the observation field name here (field_obs) with the accumulation period appended to it (separated by an underscore), so we must do the same here to get an exact match. @@ -581,77 +556,23 @@ OBS_VAR{{ns.var_count}}_NAME = {{field_obs}} {#- Set observation field level(s). #} - {%- if (input_level_fcst == 'all') %} - {%- set levels_obs = valid_levels_obs %} -{#- -If input_level_fcst is set to a specific forecast level, then the -observation level is given by the element in the list of valid observation -levels that has the same index as that of input_level_fcst in the list -of valid forecast levels. -#} - {%- else %} - {%- set levels_obs = [valid_levels_obs[indx_input_level_fcst]] %} - {%- endif %} OBS_VAR{{ns.var_count}}_LEVELS = {{levels_obs|join(', ')}} {#- -Set observation field threshold(s). Note that no observation thresholds -are included in the METplus configuration file if input_thresh_fcst is -set to 'none'. +Set observation field threshold(s). Note that: +1) No observation thresholds are included in the METplus configuration + file if input_thresh_fcst is set to 'none'. +2) If threshes_obs has been reset to something other than its default value + of an empty list, then we set the observation thresholds in the METplus + configuration file because that implies threshes_obs was set above to + a non-empty value. Then reset threshes_obs to its default value for + proper processing of thresholds for the next field. 
#} {%- if (input_thresh_fcst != 'none') %} -{#- -If input_level_fcst is set to 'all' and there is more than one (forecast -or observation) level to be verified for the current (forecast or -observation) field, then the list of observation thresholds for each -observation level must be identical to every other. Check for this. -Note that this restriction includes the order of the thresholds, i.e. -the set of thresholds for each level must be in the same order as for -all other levels. -#} - {%- if (input_level_fcst == 'all') and (num_valid_levels > 1) %} - {{- metplus_macros.check_for_identical_threshes_by_level( - field_obs, fields_levels_threshes_obs[i]) }} - {%- endif %} -{#- -Now set the list of valid observation thresholds to the one corresponding -to the first (zeroth) observation level in the list of observation levels -set above. We can do this because, for the case of a single observaton -level, there is only one list of observation thresholds to consider (the -first one), and for the case of all levels, all levels have the same set -of thresholds (as verified by the check above). -#} - {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][levels_obs[0]] %} -{#- -If input_thresh_fcst is set to 'all', set the list of observation thresholds -to the full set of valid values. -#} - {%- if (input_thresh_fcst == 'all') %} - - {%- set threshes_obs = valid_threshes_obs %} -{#- -If input_thresh_fcst is set to a specific forecast threshold, then the -observation threshold is given by the element in the list of valid -observation thresholds that has the same index as that of input_thresh_fcst -in the list of valid forecast thresholds. -#} - {%- else %} - - {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %} - - {%- endif %} -{#- -If threshes_obs has been reset to something other than its default value -of an empty list, then set the observation thresholds in the METplus -configuration file because that implies threshes_obs was set above to -a non-empty value. Then reset threshes_obs to its default value for -proper processing of thresholds for the next field. -#} {%- if (threshes_obs != []) %} OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}} {%- endif %} {%- set threshes_obs = [] %} - {%- endif %} {#- @@ -721,6 +642,8 @@ forecast and observation settings) from those for the next field. {%- endfor %} + + {%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} {%- if (input_field_group in ['APCP', 'ASNOW']) %} # diff --git a/parm/metplus/PointStat_ensmean.conf b/parm/metplus/PointStat_ensmean.conf index 67a20034df..b16a481dbd 100644 --- a/parm/metplus/PointStat_ensmean.conf +++ b/parm/metplus/PointStat_ensmean.conf @@ -183,70 +183,49 @@ script instead of a hard-coded value as below. {#- Jinja requires certain variables to be defined globally within the template -before they can be used in if-statements and other scopes (see Jinja -scoping rules). Define such variables. +before they can be used in if-statements and other scopes (see Jinja scoping +rules). Define such variables. 
#} -{%- set level_fcst = '' %} -{%- set level_obs = '' %} {%- set indx_level_fcst = '' %} - -{%- set valid_threshes_fcst = [] %} -{%- set valid_threshes_obs = [] %} -{%- set threshes_fcst = '' %} -{%- set threshes_obs = '' %} {%- set indx_input_thresh_fcst = '' %} - +{%- set error_msg = '' %} {%- set opts_indent = '' %} {%- set opts_indent_len = '' %} {%- set tmp = '' %} -{%- set error_msg = '' %} -{#- -Make sure that the set of field groups for forecasts and observations -are identical. -#} -{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %} -{%- set fgs_obs = vx_config_dict['obs'].keys()|list %} -{%- if (fgs_fcst != fgs_obs) %} - {%- set error_msg = '\n' ~ -'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~ -'to that for observations (fgs_obs) but isn\'t:\n' ~ -' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~ -' fgs_obs = ' ~ fgs_obs %} - {{metplus_macros.print_err_and_quit(error_msg)}} -{%- endif %} +{%- set field_fcst = '' %} +{%- set field_obs = '' %} +{%- set level_fcst = '' %} +{%- set level_obs = '' %} +{%- set thresh_fcst = '' %} +{%- set thresh_obs = '' %} + +{%- set threshes_fcst = [] %} +{%- set threshes_obs = [] %} {#- -Extract the lists of forecast and observation dictionaries containing -the valid fields, levels, and thresholds corresponding to the specified -field group (input_field_group). Note that it would be simpler to have -these be just dictionaries in which the keys are the field names (instead -of them being LISTS of dictionaries in which each dictionary contains a -single key that is the field name), but that approach cannot be used here -because it is possible for field names to be repeated (for both forecasts -and observations). For example, in the observations, the field name -'PRWE' appears more than once, each time with a different threshold, and -the combination of name and threshold is what constitutes a unique field, -not just the name by itself. +Get the set of valid field groups and ensure that the specified input +field group appears in this list. #} -{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %} -{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %} +{%- set valid_field_groups = vx_config_dict.keys()|list %} +{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }} {#- -Reset the specified forecast level so that if it happens to be an -accumulation (e.g. 'A03'), the leading zeros in front of the hour are -stipped out (e.g. reset to 'A3'). +Reset the input forecast level so that if it happens to be an accumulation +(e.g. 'A03'), the leading zeros in front of the hour are stipped out (e.g. +reset to 'A3'). #} {%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %} {#- -Ensure that the specified input forecast level(s) (input_level_fcst) and -threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the -set(s) of valid forecast levels and thresholds, respectively, specified -in fields_levels_threshes_fcst. +Extract from the configuration dictionary the set (which itself is a +dictionary) of fields, levels, and thresholds corresponding to the input +field group. Then set the delimiter string that separates forecast and +observation values in the various items (i.e. dictionary keys and values +representing field names, levels, and thresholds) in this dictionary. 
#} -{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }} -{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }} +{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %} +{%- set delim_str = metplus_macros.set_delim_str() %} {#- Some fields in the specified field group (input_field_group) may need to @@ -264,72 +243,98 @@ following dictionary. {%- set fields_fcst_to_exclude = fields_fcst_to_exclude_by_field_group[input_field_group] %} {#- -For convenience, create lists of valid forecast and observation field -names. +Remove from the dictionary fields_levels_threshes_cpld any fields that +are in the list to be excluded. #} -{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %} -{%- set valid_fields_fcst = [] %} -{%- for i in range(0,num_valid_fields_fcst) %} - {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %} - {%- set tmp = valid_fields_fcst.append(field) %} -{%- endfor %} +{%- for field_cpld in fields_levels_threshes_cpld.copy() %} -{%- set valid_fields_obs = [] %} -{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %} -{%- for i in range(0,num_valid_fields_obs) %} - {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %} - {%- set tmp = valid_fields_obs.append(field) %} -{%- endfor %} + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} -{#- -Ensure that the number of valid fields for forecasts is equal to that -for the observations. -#} -{%- set num_valid_fields = 0 %} -{%- if (num_valid_fields_fcst != num_valid_fields_obs) %} - {%- set error_msg = '\n' ~ -'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~ -'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~ -'but isn\'t:\n' ~ -' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~ -' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~ -'The lists of valid forecast and observation fields are:\n' ~ -' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~ -' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %} - {{metplus_macros.print_err_and_quit(error_msg)}} -{%- else %} - {%- set num_valid_fields = num_valid_fields_fcst %} -{%- endif %} + {%- if field_fcst in fields_fcst_to_exclude %} + {%- set tmp = fields_levels_threshes_cpld.pop(field_cpld) %} + {%- endif %} + +{%- endfor %} {#- -Loop over the valid fields and set field names, levels, thresholds, and/ -or options for each field, both for forecasts and for obseratiions, in -the METplus configuration file. +Loop over the fields and set field names, levels, thresholds, and/or +options for each field, both for forecasts and for observations, in the +METplus configuration file. #} {%- set ns = namespace(var_count = 0) %} +{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %} -{%- for i in range(0,num_valid_fields) if valid_fields_fcst[i] not in fields_fcst_to_exclude %} - - {%- set field_fcst = valid_fields_fcst[i] %} - {%- set field_obs = valid_fields_obs[i] %} + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} {#- For convenience, create lists of valid forecast and observation levels -for the current field. 
Then check that the number of valid levels for -forecasts is the same as that for observations. +for the current field. +#} + {%- set valid_levels_fcst = [] %} + {%- set valid_levels_obs = [] %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + {%- set tmp = valid_levels_fcst.append(level_fcst) %} + {%- set tmp = valid_levels_obs.append(level_obs) %} + {%- endfor %} + +{#- +Make sure that the input forecast level (input_level_fcst) is set to a +valid value. #} - {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %} - {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %} + {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %} + {%- set error_msg = '\n' ~ +'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~ +'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~ +'for the current forecast field (field_fcst). This is not the case:\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} {#- -Extract dictionary of valid forecast levels (the dictionary keys) and -corresponding lists of valid thresholds (the values) for each level. -Then loop over these levels and corresponding lists of thresholds to set -both the forecast and observation field names, levels, thresholds, and/or -options. +Loop over the (coupled) levels and corresponding lists of thresholds. +Extract from these the level values for forecasts and observations and +use them to set the forecast and observation field names, levels, +thresholds, and/or options in the METplus configuration file. #} - {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %} - {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + + {%- set valid_threshes_fcst = [] %} + {%- set valid_threshes_obs = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- set thresh_obs = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- set tmp = valid_threshes_obs.append(thresh_obs) %} + {%- endfor %} {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} {#- @@ -362,17 +367,19 @@ to the full set of valid values. {%- set threshes_fcst = valid_threshes_fcst %} {#- If input_thresh_fcst is set to a specific value: - 1) Ensure that input_thresh_fcst exists in the list of valid forecast - thresholds. - 2) Get the index of input_thresh_fcst in the list of valid forecast - thresholds. This will be needed later below when setting the - observation threshold(s). - 3) Use this index to set the forecast threshold to a one-element list - containing the specified forecast threshold. +* If that value is valid, i.e. 
it exists in the list of valid forecast + thresholds, get its index in that list and use it to set the forecast + threshold to a one-element list containing that value. Note that the + index will be needed later below when setting the observation threshold(s). +* If the input forecast threshold is not valid, print out a warning message + and exit. #} {%- else %} - {%- if input_thresh_fcst not in valid_threshes_fcst %} + {%- if input_thresh_fcst in valid_threshes_fcst %} + {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} + {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} + {%- else %} {%- set error_msg = '\n' ~ 'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~ 'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~ @@ -383,8 +390,6 @@ If input_thresh_fcst is set to a specific value: ' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %} {{metplus_macros.print_err_and_quit(error_msg)}} {%- endif %} - {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} - {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} {%- endif %} {#- @@ -441,11 +446,6 @@ set to 'none'. #} {%- if (input_thresh_fcst != 'none') %} {#- -Set the list of valid observation thresholds to the one corresponding to -the current observation level (level_obs). -#} - {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %} -{#- If input_thresh_fcst is set to 'all', set the list of observation thresholds to the full set of valid values. #} @@ -459,9 +459,7 @@ observation thresholds that has the same index as that of input_thresh_fcst in the list of valid forecast thresholds. #} {%- else %} - {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %} - {%- endif %} {#- If threshes_obs has been reset to something other than its default value diff --git a/parm/metplus/PointStat_ensprob.conf b/parm/metplus/PointStat_ensprob.conf index 69ef9fd5db..84b9f3954d 100644 --- a/parm/metplus/PointStat_ensprob.conf +++ b/parm/metplus/PointStat_ensprob.conf @@ -185,117 +185,53 @@ script instead of a hard-coded value as below. {#- Jinja requires certain variables to be defined globally within the template -before they can be used in if-statements and other scopes (see Jinja -scoping rules). Define such variables. +before they can be used in if-statements and other scopes (see Jinja scoping +rules). Define such variables. #} -{%- set level_fcst = '' %} -{%- set level_obs = '' %} {%- set indx_level_fcst = '' %} - -{%- set valid_threshes_fcst = [] %} -{%- set valid_threshes_obs = [] %} -{%- set thresh_fcst = '' %} -{%- set thresh_obs = '' %} {%- set indx_thresh_fcst = '' %} -{%- set thresh_fcst_and_or = '' %} - +{%- set error_msg = '' %} {%- set opts_indent = '' %} {%- set opts_indent_len = '' %} -{%- set tmp = '' %} -{%- set error_msg = '' %} -{#- -Make sure that the set of field groups for forecasts and observations -are identical. 
-#} -{%- set fgs_fcst = vx_config_dict['fcst'].keys()|list %} -{%- set fgs_obs = vx_config_dict['obs'].keys()|list %} -{%- if (fgs_fcst != fgs_obs) %} - {%- set error_msg = '\n' ~ -'The list of valid field groups for forecasts (fgs_fcst) must be identical\n' ~ -'to that for observations (fgs_obs) but isn\'t:\n' ~ -' fgs_fcst = ' ~ fgs_fcst ~ '\n' ~ -' fgs_obs = ' ~ fgs_obs %} - {{metplus_macros.print_err_and_quit(error_msg)}} -{%- endif %} +{%- set field_fcst = '' %} +{%- set field_obs = '' %} +{%- set level_fcst = '' %} +{%- set level_obs = '' %} +{%- set thresh_fcst = '' %} +{%- set thresh_obs = '' %} +{%- set thresh_fcst_and_or = '' %} {#- -Extract the lists of forecast and observation dictionaries containing -the valid fields, levels, and thresholds corresponding to the specified -field group (input_field_group). Note that it would be simpler to have -these be just dictionaries in which the keys are the field names (instead -of them being LISTS of dictionaries in which each dictionary contains a -single key that is the field name), but that approach cannot be used here -because it is possible for field names to be repeated (for both forecasts -and observations). For example, in the observations, the field name -'PRWE' appears more than once, each time with a different threshold, and -the combination of name and threshold is what constitutes a unique field, -not just the name by itself. +Get the set of valid field groups and ensure that the specified input +field group appears in this list. #} -{%- set fields_levels_threshes_fcst = vx_config_dict['fcst'][input_field_group] %} -{%- set fields_levels_threshes_obs = vx_config_dict['obs'][input_field_group] %} +{%- set valid_field_groups = vx_config_dict.keys()|list %} +{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }} {#- -Reset the specified forecast level so that if it happens to be an -accumulation (e.g. 'A03'), the leading zeros in front of the hour are -stipped out (e.g. reset to 'A3'). +Reset the input forecast level so that if it happens to be an accumulation +(e.g. 'A03'), the leading zeros in front of the hour are stipped out (e.g. +reset to 'A3'). #} {%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %} {#- -Ensure that the specified input forecast level(s) (input_level_fcst) and -threshold(s) (input_thresh_fcst) are valid, i.e. that they are in the -set(s) of valid forecast levels and thresholds, respectively, specified -in fields_levels_threshes_fcst. +Extract from the configuration dictionary the set (which itself is a +dictionary) of fields, levels, and thresholds corresponding to the input +field group. Then set the delimiter string that separates forecast and +observation values in the various items (i.e. dictionary keys and values +representing field names, levels, and thresholds) in this dictionary. #} -{{- metplus_macros.check_level(fields_levels_threshes_fcst, input_level_fcst) }} -{{- metplus_macros.check_thresh(fields_levels_threshes_fcst, input_level_fcst, input_thresh_fcst) }} +{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %} +{%- set delim_str = metplus_macros.set_delim_str() %} {#- -For convenience, create lists of valid forecast and observation field -names. 
-#} -{%- set num_valid_fields_fcst = fields_levels_threshes_fcst|length %} -{%- set valid_fields_fcst = [] %} -{%- for i in range(0,num_valid_fields_fcst) %} - {%- set field = fields_levels_threshes_fcst[i].keys()|list|join('') %} - {%- set tmp = valid_fields_fcst.append(field) %} -{%- endfor %} - -{%- set valid_fields_obs = [] %} -{%- set num_valid_fields_obs = fields_levels_threshes_obs|length %} -{%- for i in range(0,num_valid_fields_obs) %} - {%- set field = fields_levels_threshes_obs[i].keys()|list|join('') %} - {%- set tmp = valid_fields_obs.append(field) %} -{%- endfor %} - -{#- -Ensure that the number of valid fields for forecasts is equal to that -for the observations. -#} -{%- set num_valid_fields = 0 %} -{%- if (num_valid_fields_fcst != num_valid_fields_obs) %} - {%- set error_msg = '\n' ~ -'The number of valid forecast fields (num_valid_fields_fcst) must be\n' ~ -'equal to the number of valid observation fields (num_valid_fields_obs)\n' ~ -'but isn\'t:\n' ~ -' num_valid_fields_fcst = ' ~ num_valid_fields_fcst ~ '\n' ~ -' num_valid_fields_obs = ' ~ num_valid_fields_obs ~ '\n' ~ -'The lists of valid forecast and observation fields are:\n' ~ -' valid_fields_fcst = ' ~ valid_fields_fcst ~ '\n' ~ -' valid_fields_obs = ' ~ valid_fields_obs ~ '\n' %} - {{metplus_macros.print_err_and_quit(error_msg)}} -{%- else %} - {%- set num_valid_fields = num_valid_fields_fcst %} -{%- endif %} - -{#- -Loop over the valid fields and set field names, levels, thresholds, and/ -or options for each field, both for forecasts and for obseratiions, in -the METplus configuration file. +Loop over the fields and set field names, levels, thresholds, and/or +options for each field, both for forecasts and for observations, in the +METplus configuration file. #} {%- set ns = namespace(var_count = 0) %} - {#- This outer for-loop is included to make this code as similar as possible to the one in GridStat_ensprob.conf. There, treat_fcst_as_prob takes on @@ -305,28 +241,74 @@ need to be set to False. This is being investigated (12/13/2023). #} {%- for treat_fcst_as_prob in [True] %} - {%- for i in range(0,num_valid_fields) %} + {%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %} - {%- set field_fcst = valid_fields_fcst[i] %} - {%- set field_obs = valid_fields_obs[i] %} + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} {#- For convenience, create lists of valid forecast and observation levels -for the current field. Then check that the number of valid levels for -forecasts is the same as that for observations. +for the current field. +#} + {%- set valid_levels_fcst = [] %} + {%- set valid_levels_obs = [] %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + {%- set tmp = valid_levels_fcst.append(level_fcst) %} + {%- set tmp = valid_levels_obs.append(level_obs) %} + {%- endfor %} + +{#- +Make sure that the input forecast level (input_level_fcst) is set to a +valid value. 
#} - {%- set valid_levels_fcst = fields_levels_threshes_fcst[i][field_fcst].keys()|list %} - {%- set valid_levels_obs = fields_levels_threshes_obs[i][field_obs].keys()|list %} + {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %} + {%- set error_msg = '\n' ~ +'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~ +'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~ +'for the current forecast field (field_fcst). This is not the case:\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} {#- -Extract dictionary of valid forecast levels (the dictionary keys) and -corresponding lists of valid thresholds (the values) for each level. -Then loop over these levels and corresponding lists of thresholds to set -both the forecast and observation field names, levels, thresholds, and/or -options. +Loop over the (coupled) levels and corresponding lists of thresholds. +Extract from these the level values for forecasts and observations and +use them to set the forecast and observation field names, levels, +thresholds, and/or options in the METplus configuration file. #} - {%- set valid_levels_threshes_fcst = fields_levels_threshes_fcst[i][field_fcst] %} - {%- for level_fcst, valid_threshes_fcst in valid_levels_threshes_fcst.items() %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + + {%- set valid_threshes_fcst = [] %} + {%- set valid_threshes_obs = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- set thresh_obs = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- set tmp = valid_threshes_obs.append(thresh_obs) %} + {%- endfor %} {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} @@ -402,11 +384,6 @@ set to 'none'. #} {%- if (input_thresh_fcst != 'none') %} {#- -Set the list of valid observation thresholds to the one corresponding to -the current observation level (level_obs). -#} - {%- set valid_threshes_obs = fields_levels_threshes_obs[i][field_obs][level_obs] %} -{#- Set the observation threshold. This is given by the element in the list of valid observation thresholds that has the same index as that of the current forcast threshold (thresh_fcst) in the list of valid forecast @@ -442,6 +419,7 @@ OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE"; {%- endif %} {%- endif %} + {#- Print out a newline to separate the settings for the current field (both forecast and observation settings) from those for the next field. diff --git a/parm/metplus/metplus_macros.jinja b/parm/metplus/metplus_macros.jinja index 94ac5d9485..4dc8c599ce 100644 --- a/parm/metplus/metplus_macros.jinja +++ b/parm/metplus/metplus_macros.jinja @@ -1,3 +1,13 @@ +{#- +Set the string delimiter that separates the forecast value of an item +(e.g. a field name, level, or threshold) from its observation value in the +various items in the deterministic and ensemble verification configuration +files. 
+#} +{%- macro set_delim_str() %} + {{-'%%'}} +{%- endmacro %} + {#- This macro prints out an error message and quits the jinja templater. #} @@ -19,114 +29,32 @@ prints out 'A3'. {{- level }} {%- endif %} {%- endmacro %} -{#- -This macro checks whether the specified level (input_level) has a valid -value. input_level may be set to 'all' or to a specific level. If set -to 'all', input_level is not checked because in this case, whatever valid/ -available levels are found will be included in the METplus configuration -file for all specified fields. input_level IS checked if it is set to -any other value because in this case, all the specified fields will use -only that specific level in the METplus configuration file, which implies -that the level must be valid for all such fields. -#} -{%- macro check_level(fields_levels_threshes, input_level) %} - {%- if input_level != 'all' %} - - {%- set num_valid_fields = fields_levels_threshes|length %} - {%- set valid_fields = [] %} - {%- for i in range(0,num_valid_fields) %} - {%- set field = fields_levels_threshes[i].keys()|list|join('') %} - {%- set tmp = valid_fields.append(field) %} - {%- endfor %} - - {%- for i in range(0,num_valid_fields) %} - {%- set field = valid_fields[i] %} - {%- set valid_levels = fields_levels_threshes[i][field].keys()|list %} - {%- if input_level not in valid_levels %} - {%- set error_msg = '\n' ~ - 'The specified level (input_level) is not in the list of valid levels\n' ~ - '(valid_levels) for the current field (field):\n' ~ - ' field = \'' ~ field ~ '\'\n' ~ - ' valid_levels = ' ~ valid_levels ~ '\n' - ' input_level = \'' ~ input_level ~ '\'\n' - 'input_level must either be set to the string \'all\' (to include all valid\n' ~ - 'values in the verification) or to one of the elements in valid_levels.' %} - {{print_err_and_quit(error_msg)}} - {%- endif %} - {%- endfor %} - - {%- endif %} - -{%- endmacro %} {#- This macro checks whether the specified threshold (input_thresh) has a -valid value. input_thresh may be set to 'none', 'all', or a specific -threshold. If set to 'none', input_thresh is not checked for a valid -value since threshold information will not be included in the METplus -configuration file. input_thresh is also not checked for a valid value -if it set to 'all' because in this case, whatever valid/available thresholds -are found will be included in the METplus configuration file for all -specified field and level combination. Finally, input_thresh IS checked -for a valid value if it is set to something other than 'none' and 'all' -because in this case, all specified field and level combinations (where -the latter, depending on the value of input_level, may be either all -valid/available levels or a single one) will use only that specific -threshold in the METplus configuration file, which implies that the -threshold must be valid for all such field and level combinations. 
#} -{%- macro check_thresh(fields_levels_threshes, input_level, input_thresh) %} - - {%- if (input_thresh != 'none') and (input_thresh != 'all') %} - - {%- set num_valid_fields = fields_levels_threshes|length %} - {%- set valid_fields = [] %} - {%- for i in range(0,num_valid_fields) %} - {%- set field = fields_levels_threshes[i].keys()|list|join('') %} - {%- set tmp = valid_fields.append(field) %} - {%- endfor %} - - {%- for i in range(0,num_valid_fields) %} - {%- set field = valid_fields[i] %} - {%- set valid_levels = fields_levels_threshes[i][field].keys()|list %} - {%- set valid_levels_threshes = fields_levels_threshes[i][field] %} - - {%- for level, valid_threshes in valid_levels_threshes.items() %} - {%- if (input_level == 'all') or (input_level == level) %} - {%- if input_thresh not in valid_threshes %} - {%- set error_msg = '\n' ~ -'The specified threshold (input_thresh) is not in the list of valid\n' ~ -'thresholds (valid_threshes) for the current field (field) and level\n' ~ -'(level) combination:\n' ~ -' field = \'' ~ field ~ '\'\n' ~ -' level = \'' ~ level ~ '\'\n' ~ -' valid_threshes = ' ~ valid_threshes ~ '\n' -' input_thresh = \'' ~ input_thresh ~ '\'' %} -'input_thresh must be set to the string \'all\' (to include in the METplus\n' ~ -'configuration file all thresholds for each valid combination of field and\n' ~ -'level), to the string \'none\' (to include no threshold information in the\n' ~ -'METplus configuration file), or to one of the elements in valid_threshes\n' ~ -'(to include only that specific threshold in the METplus configuration file).' %} - {{print_err_and_quit(error_msg)}} - {%- endif %} - {%- endif %} - - {%- endfor %} - - {%- endfor %} - +{%- macro check_field_group(valid_field_groups, input_field_group) %} + {%- if input_field_group not in valid_field_groups %} + {%- set error_msg = '\n' ~ + 'The specified input field group (input_field_group) is not in the list of\n' ~ + 'valid field groups (valid_field_groups):\n' ~ + ' input_field_group = \'' ~ input_field_group ~ '\'\n' ~ + ' valid_field_groups = ' ~ valid_field_groups ~ '\n' ~ + 'Reset input_field_group to one of the elements in valid_field_groups and\n' ~ + 'rerun.' %} + {{print_err_and_quit(error_msg)}} {%- endif %} - {%- endmacro %} + {#- This macro checks whether, for the given field, the list of thresholds for all levels are identical. If not, it prints out an error message and errors out. #} {%- macro check_for_identical_threshes_by_level(field, levels_threshes) %} - {%- set avail_levels = levels_threshes[field].keys()|list %} + {%- set avail_levels = levels_threshes.keys()|list %} {%- set num_avail_levels = avail_levels|length %} - {%- set threshes_by_avail_level = levels_threshes[field].values()|list %} + {%- set threshes_by_avail_level = levels_threshes.values()|list %} {%- for i in range(1,num_avail_levels) %} {%- set level = avail_levels[i-1] %} {%- set threshes = threshes_by_avail_level[i-1] %} @@ -135,8 +63,8 @@ and errors out. 
{%- if (threshes_next != threshes) %} {%- set error_msg = '\n\n' ~ 'For the given field (field), the set of thresholds for the next level\n' ~ -'(threshes_next, level_next) is not equal to that of the current level\n' ~ -'(threshes, level) (note that order of thresholds matters here):\n' ~ +'(level_next, threshes_next) is not equal to that of the current level\n' ~ +'(level, threshes) (note that order of thresholds matters here):\n' ~ ' field = \'' ~ field ~ '\'\n' ~ ' num_avail_levels = ' ~ num_avail_levels ~ '\n' ~ ' level = \'' ~ level ~ '\'\n' ~ diff --git a/parm/metplus/vx_config_det.yaml b/parm/metplus/vx_config_det.yaml index 4c721176c6..8ea3fd5e13 100644 --- a/parm/metplus/vx_config_det.yaml +++ b/parm/metplus/vx_config_det.yaml @@ -91,114 +91,118 @@ ADPSFC: L0%%Z0: ['ge1.0%%ge174&&le176'] ADPUPA: TMP: - P1000: [] - P925: [] - P850: [] - P700: [] - P500: [] - P400: [] - P300: [] - P250: [] - P200: [] - P150: [] - P100: [] - P50: [] - P20: [] - P10: [] + P1000: &adpupa_tmp_threshes + [] + P925: *adpupa_tmp_threshes + P850: *adpupa_tmp_threshes + P700: *adpupa_tmp_threshes + P500: *adpupa_tmp_threshes + P400: *adpupa_tmp_threshes + P300: *adpupa_tmp_threshes + P250: *adpupa_tmp_threshes + P200: *adpupa_tmp_threshes + P150: *adpupa_tmp_threshes + P100: *adpupa_tmp_threshes + P50: *adpupa_tmp_threshes + P20: *adpupa_tmp_threshes + P10: *adpupa_tmp_threshes RH: - P1000: [] - P925: [] - P850: [] - P700: [] - P500: [] - P400: [] - P300: [] - P250: [] + P1000: &adpupa_rh_threshes + [] + P925: *adpupa_rh_threshes + P850: *adpupa_rh_threshes + P700: *adpupa_rh_threshes + P500: *adpupa_rh_threshes + P400: *adpupa_rh_threshes + P300: *adpupa_rh_threshes + P250: *adpupa_rh_threshes DPT: - P1000: [] - P925: [] - P850: [] - P700: [] - P500: [] - P400: [] - P300: [] + P1000: &adpupa_dpt_threshes + [] + P925: *adpupa_dpt_threshes + P850: *adpupa_dpt_threshes + P700: *adpupa_dpt_threshes + P500: *adpupa_dpt_threshes + P400: *adpupa_dpt_threshes + P300: *adpupa_dpt_threshes UGRD: - P1000: ['ge2.572'] - P925: ['ge2.572'] - P850: ['ge2.572'] - P700: ['ge2.572'] - P500: ['ge2.572'] - P400: ['ge2.572'] - P300: ['ge2.572'] - P250: ['ge2.572'] - P200: ['ge2.572'] - P150: ['ge2.572'] - P100: ['ge2.572'] - P50: ['ge2.572'] - P20: ['ge2.572'] - P10: ['ge2.572'] + P1000: &adpupa_ugrd_threshes + ['ge2.572'] + P925: *adpupa_ugrd_threshes + P850: *adpupa_ugrd_threshes + P700: *adpupa_ugrd_threshes + P500: *adpupa_ugrd_threshes + P400: *adpupa_ugrd_threshes + P300: *adpupa_ugrd_threshes + P250: *adpupa_ugrd_threshes + P200: *adpupa_ugrd_threshes + P150: *adpupa_ugrd_threshes + P100: *adpupa_ugrd_threshes + P50: *adpupa_ugrd_threshes + P20: *adpupa_ugrd_threshes + P10: *adpupa_ugrd_threshes VGRD: - P1000: ['ge2.572'] - P925: ['ge2.572'] - P850: ['ge2.572'] - P700: ['ge2.572'] - P500: ['ge2.572'] - P400: ['ge2.572'] - P300: ['ge2.572'] - P250: ['ge2.572'] - P200: ['ge2.572'] - P150: ['ge2.572'] - P100: ['ge2.572'] - P50: ['ge2.572'] - P20: ['ge2.572'] - P10: ['ge2.572'] + P1000: &adpupa_vgrd_threshes + ['ge2.572'] + P925: *adpupa_vgrd_threshes + P850: *adpupa_vgrd_threshes + P700: *adpupa_vgrd_threshes + P500: *adpupa_vgrd_threshes + P400: *adpupa_vgrd_threshes + P300: *adpupa_vgrd_threshes + P250: *adpupa_vgrd_threshes + P200: *adpupa_vgrd_threshes + P150: *adpupa_vgrd_threshes + P100: *adpupa_vgrd_threshes + P50: *adpupa_vgrd_threshes + P20: *adpupa_vgrd_threshes + P10: *adpupa_vgrd_threshes WIND: - P1000: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] - P925: ['ge2.572', 
'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] - P850: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] - P700: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] - P500: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] - P400: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] - P300: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] - P250: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] - P200: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] - P150: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] - P100: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] - P50: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] - P20: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] - P10: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] + P1000: &adpupa_wind_threshes + ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] + P925: *adpupa_wind_threshes + P850: *adpupa_wind_threshes + P700: *adpupa_wind_threshes + P500: *adpupa_wind_threshes + P400: *adpupa_wind_threshes + P300: *adpupa_wind_threshes + P250: *adpupa_wind_threshes + P200: *adpupa_wind_threshes + P150: *adpupa_wind_threshes + P100: *adpupa_wind_threshes + P50: *adpupa_wind_threshes + P20: *adpupa_wind_threshes + P10: *adpupa_wind_threshes HGT: - P1000: [] - P950: [] - P925: [] - P850: [] - P700: [] - P500: [] - P400: [] - P300: [] - P250: [] - P200: [] - P150: [] - P100: [] - P50: [] - P20: [] - P10: [] + P1000: &adpupa_hgt_threshes + [] + P950: *adpupa_hgt_threshes + P925: *adpupa_hgt_threshes + P850: *adpupa_hgt_threshes + P700: *adpupa_hgt_threshes + P500: *adpupa_hgt_threshes + P400: *adpupa_hgt_threshes + P300: *adpupa_hgt_threshes + P250: *adpupa_hgt_threshes + P200: *adpupa_hgt_threshes + P150: *adpupa_hgt_threshes + P100: *adpupa_hgt_threshes + P50: *adpupa_hgt_threshes + P20: *adpupa_hgt_threshes + P10: *adpupa_hgt_threshes SPFH: - P1000: [] - P850: [] - P700: [] - P500: [] - P400: [] - P300: [] + P1000: &adpupa_spfh_threshes + [] + P850: *adpupa_spfh_threshes + P700: *adpupa_spfh_threshes + P500: *adpupa_spfh_threshes + P400: *adpupa_spfh_threshes + P300: *adpupa_spfh_threshes CAPE: - L0%%L0-100000: - ['gt500', 'gt1000', 'gt1500', 'gt2000', 'gt3000', 'gt4000'] + L0%%L0-100000: ['gt500', 'gt1000', 'gt1500', 'gt2000', 'gt3000', 'gt4000'] HPBL%%PBL: - Z0%%L0: - [] + Z0%%L0: [] HGT%%PBL: - L0: - [] + L0: [] CAPE%%MLCAPE: - L0-90%%L0: - ['gt500', 'gt1000', 'gt1500', 'gt2000', 'gt3000', 'gt4000'] + L0-90%%L0: ['gt500', 'gt1000', 'gt1500', 'gt2000', 'gt3000', 'gt4000'] diff --git a/parm/wflow/verify_det.yaml b/parm/wflow/verify_det.yaml index 79f04eeaaa..e82d7c61e1 100644 --- a/parm/wflow/verify_det.yaml +++ b/parm/wflow/verify_det.yaml @@ -21,18 +21,6 @@ default_task_verify_det: &default_task_verify_det queue: '&QUEUE_DEFAULT;' walltime: 00:30:00 -task_parse_vx_config_det: - <<: *default_task_verify_det - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_PARSE_VX_CONFIG"' - envars: - <<: *default_vars - DET_OR_ENS: 'det' - join: !cycstr '&LOGDIR;/{{ jobname }}&LOGEXT;' - walltime: 00:05:00 
- # No dependencies are needed for this task because as long as any deterministic - # verification tasks are going to be run (i.e. as long as this configuration - # file is included in the workflow), then this task must be launched. - metatask_GridStat_CCPA_all_accums_all_mems: var: ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' @@ -64,9 +52,6 @@ metatask_GridStat_CCPA_all_accums_all_mems: taskdep_pcpcombine_fcst: attrs: task: run_MET_PcpCombine_fcst_APCP#ACCUM_HH#h_mem#mem# - taskdep_parse_vx_config_det: - attrs: - task: parse_vx_config_det metatask_GridStat_NOHRSC_all_accums_all_mems: var: @@ -99,9 +84,6 @@ metatask_GridStat_NOHRSC_all_accums_all_mems: taskdep_pcpcombine_fcst: attrs: task: run_MET_PcpCombine_fcst_ASNOW#ACCUM_HH#h_mem#mem# - taskdep_parse_vx_config_det: - attrs: - task: parse_vx_config_det metatask_GridStat_MRMS_all_mems: var: @@ -133,9 +115,6 @@ metatask_GridStat_MRMS_all_mems: attrs: age: 00:00:00:30 text: !cycstr '{{ workflow.EXPTDIR }}/@Y@m@d@H/post_files_exist_mem#mem#.txt' - taskdep_parse_vx_config_det: - attrs: - task: parse_vx_config_det metatask_PointStat_NDAS_all_mems: var: @@ -167,6 +146,3 @@ metatask_PointStat_NDAS_all_mems: attrs: age: 00:00:00:30 text: !cycstr '{{ workflow.EXPTDIR }}/@Y@m@d@H/post_files_exist_mem#mem#.txt' - taskdep_parse_vx_config_det: - attrs: - task: parse_vx_config_det diff --git a/parm/wflow/verify_ens.yaml b/parm/wflow/verify_ens.yaml index 3f7638587d..18b23a1eb0 100644 --- a/parm/wflow/verify_ens.yaml +++ b/parm/wflow/verify_ens.yaml @@ -21,18 +21,6 @@ default_task_verify_ens: &default_task_verify_ens queue: '&QUEUE_DEFAULT;' walltime: 01:00:00 -task_parse_vx_config_ens: - <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_PARSE_VX_CONFIG"' - envars: - <<: *default_vars - DET_OR_ENS: 'ens' - join: !cycstr '&LOGDIR;/{{ jobname }}&LOGEXT;' - walltime: 00:05:00 - # No dependencies are needed for this task because as long as any ensemble - # verification tasks are going to be run (i.e. as long as this configuration - # file is included in the workflow), then this task must be launched. 
- metatask_GenEnsProd_EnsembleStat_CCPA: var: ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' @@ -59,9 +47,6 @@ metatask_GenEnsProd_EnsembleStat_CCPA: metataskdep_pcpcombine_fcst: attrs: metatask: PcpCombine_fcst_APCP#ACCUM_HH#h_all_mems - taskdep_parse_vx_config_ens: - attrs: - task: parse_vx_config_ens task_run_MET_EnsembleStat_vx_APCP#ACCUM_HH#h: <<: *task_GenEnsProd_CCPA envars: @@ -96,9 +81,6 @@ metatask_GenEnsProd_EnsembleStat_NOHRSC: metataskdep_pcpcombine_fcst: attrs: metatask: PcpCombine_fcst_ASNOW#ACCUM_HH#h_all_mems - taskdep_parse_vx_config_ens: - attrs: - task: parse_vx_config_ens task_run_MET_EnsembleStat_vx_ASNOW#ACCUM_HH#h: <<: *task_GenEnsProd_NOHRSC envars: @@ -134,9 +116,6 @@ metatask_GenEnsProd_EnsembleStat_MRMS: metataskdep_check_post_output: &check_post_output attrs: metatask: check_post_output_all_mems - taskdep_parse_vx_config_ens: - attrs: - task: parse_vx_config_ens task_run_MET_EnsembleStat_vx_#VAR#: <<: *task_GenEnsProd_MRMS envars: @@ -179,9 +158,6 @@ metatask_GenEnsProd_EnsembleStat_NDAS: task: run_MET_Pb2nc_obs metataskdep_check_post_output: <<: *check_post_output - taskdep_parse_vx_config_ens: - attrs: - task: parse_vx_config_ens task_run_MET_EnsembleStat_vx_#VAR#: <<: *task_GenEnsProd_NDAS envars: diff --git a/scripts/exregional_parse_vx_config.sh b/scripts/exregional_parse_vx_config.sh deleted file mode 100755 index 13632c7e53..0000000000 --- a/scripts/exregional_parse_vx_config.sh +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pcpcombine|task_run_post" ${GLOBAL_VAR_DEFNS_FP} -# -#----------------------------------------------------------------------- -# -# Source files defining auxiliary functions for verification. -# -#----------------------------------------------------------------------- -# -. $USHdir/set_vx_fhr_list.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. 
-# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the ex-script for the task that reads in the \"coupled\" yaml -verification (vx) configuration file (python dictionary) and generates -from it two \"decoupled\" vx configuration dictionaries, one for forecasts -and another for observations. The task then writes these two decoupled -dictionaries to a new configuration file in the experiment directory -that can be read by downstream vx tasks. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Call python script to generate vx configuration file containing -# separate vx configuration dictionaries for forecasts and observations. -# -#----------------------------------------------------------------------- -# -python3 ${USHdir}/metplus/decouple_fcst_obs_vx_config.py \ - --vx_type "${DET_OR_ENS}" \ - --outfile_type "txt" \ - --outdir "${EXPTDIR}" -# -#----------------------------------------------------------------------- -# -# Print message indicating successful completion of script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Done extracting vx configuration. - -Exiting script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" -========================================================================" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 diff --git a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh index 529d8d92cc..93caeaa7f2 100755 --- a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh +++ b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh @@ -296,9 +296,12 @@ metplus_log_fn="metplus.log.${metplus_log_bn}" #----------------------------------------------------------------------- # det_or_ens="ens" -vx_config_output_fn="vx_config_${det_or_ens}.txt" -vx_config_output_fp="${EXPTDIR}/${vx_config_output_fn}" -vx_config_dict=$(<"${vx_config_output_fp}") +vx_config_fn="vx_config_${det_or_ens}.yaml" +vx_config_fp="${METPLUS_CONF}/${vx_config_fn}" +vx_config_dict=$(<"${vx_config_fp}") +# Indent each line of vx_config_dict so that it is aligned properly when +# included in the yaml-formatted variable "settings" below. +vx_config_dict=$( printf "%s\n" "${vx_config_dict}" | sed 's/^/ /' ) # #----------------------------------------------------------------------- # @@ -319,50 +322,54 @@ settings="\ # # MET/METplus information. # - 'metplus_tool_name': '${metplus_tool_name}' - 'MetplusToolName': '${MetplusToolName}' - 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' - 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' +'metplus_tool_name': '${metplus_tool_name}' +'MetplusToolName': '${MetplusToolName}' +'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' +'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' # # Date and forecast hour information. 
# - 'cdate': '$CDATE' - 'fhr_list': '${FHR_LIST}' +'cdate': '$CDATE' +'fhr_list': '${FHR_LIST}' # # Input and output directory/file information. # - 'metplus_config_fn': '${metplus_config_fn:-}' - 'metplus_log_fn': '${metplus_log_fn:-}' - 'obs_input_dir': '${OBS_INPUT_DIR:-}' - 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' - 'fcst_input_dir': '${FCST_INPUT_DIR:-}' - 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' - 'output_base': '${OUTPUT_BASE}' - 'output_dir': '${OUTPUT_DIR}' - 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' - 'staging_dir': '${STAGING_DIR}' - 'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' +'metplus_config_fn': '${metplus_config_fn:-}' +'metplus_log_fn': '${metplus_log_fn:-}' +'obs_input_dir': '${OBS_INPUT_DIR:-}' +'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' +'fcst_input_dir': '${FCST_INPUT_DIR:-}' +'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' +'output_base': '${OUTPUT_BASE}' +'output_dir': '${OUTPUT_DIR}' +'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' +'staging_dir': '${STAGING_DIR}' +'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' # # Ensemble and member-specific information. # - 'num_ens_members': '${NUM_ENS_MEMBERS}' - 'ensmem_name': '${ensmem_name:-}' - 'time_lag': '${time_lag:-}' +'num_ens_members': '${NUM_ENS_MEMBERS}' +'ensmem_name': '${ensmem_name:-}' +'time_lag': '${time_lag:-}' # # Field information. # - 'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' - 'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' - 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' - 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' - 'obtype': '${OBTYPE}' - 'accum_hh': '${ACCUM_HH:-}' - 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'metplus_templates_dir': '${METPLUS_CONF:-}' - 'input_field_group': '${VAR:-}' - 'input_level_fcst': '${FCST_LEVEL:-}' - 'input_thresh_fcst': '${FCST_THRESH:-}' - 'vx_config_dict': ${vx_config_dict:-} +'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' +'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' +'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' +'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' +'obtype': '${OBTYPE}' +'accum_hh': '${ACCUM_HH:-}' +'accum_no_pad': '${ACCUM_NO_PAD:-}' +'metplus_templates_dir': '${METPLUS_CONF:-}' +'input_field_group': '${VAR:-}' +'input_level_fcst': '${FCST_LEVEL:-}' +'input_thresh_fcst': '${FCST_THRESH:-}' +# +# Verification configuration dictionary. +# +'vx_config_dict': +${vx_config_dict:-} " # Render the template to create a METplus configuration file diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh index b8f0c49fec..4f871e6e1b 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh @@ -293,9 +293,12 @@ metplus_log_fn="metplus.log.${metplus_log_bn}" #----------------------------------------------------------------------- # det_or_ens="det" -vx_config_output_fn="vx_config_${det_or_ens}.txt" -vx_config_output_fp="${EXPTDIR}/${vx_config_output_fn}" -vx_config_dict=$(<"${vx_config_output_fp}") +vx_config_fn="vx_config_${det_or_ens}.yaml" +vx_config_fp="${METPLUS_CONF}/${vx_config_fn}" +vx_config_dict=$(<"${vx_config_fp}") +# Indent each line of vx_config_dict so that it is aligned properly when +# included in the yaml-formatted variable "settings" below. 
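+# A minimal sketch of the effect (hypothetical input, not actual task
+# output): a config file holding the two lines
+#   fcst: {...}
+#   obs: {...}
+# comes back with extra leading whitespace on every line, so that the
+# block parses as a nested mapping under the 'vx_config_dict:' key in
+# the "settings" string below.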
+vx_config_dict=$( printf "%s\n" "${vx_config_dict}" | sed 's/^/ /' ) # #----------------------------------------------------------------------- # @@ -316,50 +319,54 @@ settings="\ # # MET/METplus information. # - 'metplus_tool_name': '${metplus_tool_name}' - 'MetplusToolName': '${MetplusToolName}' - 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' - 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' +'metplus_tool_name': '${metplus_tool_name}' +'MetplusToolName': '${MetplusToolName}' +'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' +'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' # # Date and forecast hour information. # - 'cdate': '$CDATE' - 'fhr_list': '${FHR_LIST}' +'cdate': '$CDATE' +'fhr_list': '${FHR_LIST}' # # Input and output directory/file information. # - 'metplus_config_fn': '${metplus_config_fn:-}' - 'metplus_log_fn': '${metplus_log_fn:-}' - 'obs_input_dir': '${OBS_INPUT_DIR:-}' - 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' - 'fcst_input_dir': '${FCST_INPUT_DIR:-}' - 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' - 'output_base': '${OUTPUT_BASE}' - 'output_dir': '${OUTPUT_DIR}' - 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' - 'staging_dir': '${STAGING_DIR}' - 'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' +'metplus_config_fn': '${metplus_config_fn:-}' +'metplus_log_fn': '${metplus_log_fn:-}' +'obs_input_dir': '${OBS_INPUT_DIR:-}' +'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' +'fcst_input_dir': '${FCST_INPUT_DIR:-}' +'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' +'output_base': '${OUTPUT_BASE}' +'output_dir': '${OUTPUT_DIR}' +'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' +'staging_dir': '${STAGING_DIR}' +'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' # # Ensemble and member-specific information. # - 'num_ens_members': '${NUM_ENS_MEMBERS}' - 'ensmem_name': '${ensmem_name:-}' - 'time_lag': '${time_lag:-}' +'num_ens_members': '${NUM_ENS_MEMBERS}' +'ensmem_name': '${ensmem_name:-}' +'time_lag': '${time_lag:-}' # # Field information. # - 'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' - 'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' - 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' - 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' - 'obtype': '${OBTYPE}' - 'accum_hh': '${ACCUM_HH:-}' - 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'metplus_templates_dir': '${METPLUS_CONF:-}' - 'input_field_group': '${VAR:-}' - 'input_level_fcst': '${FCST_LEVEL:-}' - 'input_thresh_fcst': '${FCST_THRESH:-}' - 'vx_config_dict': ${vx_config_dict:-} +'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' +'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' +'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' +'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' +'obtype': '${OBTYPE}' +'accum_hh': '${ACCUM_HH:-}' +'accum_no_pad': '${ACCUM_NO_PAD:-}' +'metplus_templates_dir': '${METPLUS_CONF:-}' +'input_field_group': '${VAR:-}' +'input_level_fcst': '${FCST_LEVEL:-}' +'input_thresh_fcst': '${FCST_THRESH:-}' +# +# Verification configuration dictionary. 
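+# (The pre-indented text read from "${vx_config_fp}" is expanded verbatim
+# on the lines after the 'vx_config_dict:' key that follows, keeping the
+# rendered "settings" string valid YAML.)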
+# +'vx_config_dict': +${vx_config_dict:-} " # Render the template to create a METplus configuration file diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh index 9939daaf76..6e4a4ff33f 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh @@ -251,9 +251,12 @@ metplus_log_fn="metplus.log.${metplus_log_bn}" #----------------------------------------------------------------------- # det_or_ens="ens" -vx_config_output_fn="vx_config_${det_or_ens}.txt" -vx_config_output_fp="${EXPTDIR}/${vx_config_output_fn}" -vx_config_dict=$(<"${vx_config_output_fp}") +vx_config_fn="vx_config_${det_or_ens}.yaml" +vx_config_fp="${METPLUS_CONF}/${vx_config_fn}" +vx_config_dict=$(<"${vx_config_fp}") +# Indent each line of vx_config_dict so that it is aligned properly when +# included in the yaml-formatted variable "settings" below. +vx_config_dict=$( printf "%s\n" "${vx_config_dict}" | sed 's/^/ /' ) # #----------------------------------------------------------------------- # @@ -274,50 +277,54 @@ settings="\ # # MET/METplus information. # - 'metplus_tool_name': '${metplus_tool_name}' - 'MetplusToolName': '${MetplusToolName}' - 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' - 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' +'metplus_tool_name': '${metplus_tool_name}' +'MetplusToolName': '${MetplusToolName}' +'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' +'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' # # Date and forecast hour information. # - 'cdate': '$CDATE' - 'fhr_list': '${FHR_LIST}' +'cdate': '$CDATE' +'fhr_list': '${FHR_LIST}' # # Input and output directory/file information. # - 'metplus_config_fn': '${metplus_config_fn:-}' - 'metplus_log_fn': '${metplus_log_fn:-}' - 'obs_input_dir': '${OBS_INPUT_DIR:-}' - 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' - 'fcst_input_dir': '${FCST_INPUT_DIR:-}' - 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' - 'output_base': '${OUTPUT_BASE}' - 'output_dir': '${OUTPUT_DIR}' - 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' - 'staging_dir': '${STAGING_DIR}' - 'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' +'metplus_config_fn': '${metplus_config_fn:-}' +'metplus_log_fn': '${metplus_log_fn:-}' +'obs_input_dir': '${OBS_INPUT_DIR:-}' +'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' +'fcst_input_dir': '${FCST_INPUT_DIR:-}' +'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' +'output_base': '${OUTPUT_BASE}' +'output_dir': '${OUTPUT_DIR}' +'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' +'staging_dir': '${STAGING_DIR}' +'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' # # Ensemble and member-specific information. # - 'num_ens_members': '${NUM_ENS_MEMBERS}' - 'ensmem_name': '${ensmem_name:-}' - 'time_lag': '${time_lag:-}' +'num_ens_members': '${NUM_ENS_MEMBERS}' +'ensmem_name': '${ensmem_name:-}' +'time_lag': '${time_lag:-}' # # Field information. 
# - 'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' - 'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' - 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' - 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' - 'obtype': '${OBTYPE}' - 'accum_hh': '${ACCUM_HH:-}' - 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'metplus_templates_dir': '${METPLUS_CONF:-}' - 'input_field_group': '${VAR:-}' - 'input_level_fcst': '${FCST_LEVEL:-}' - 'input_thresh_fcst': '${FCST_THRESH:-}' - 'vx_config_dict': ${vx_config_dict:-} +'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' +'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' +'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' +'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' +'obtype': '${OBTYPE}' +'accum_hh': '${ACCUM_HH:-}' +'accum_no_pad': '${ACCUM_NO_PAD:-}' +'metplus_templates_dir': '${METPLUS_CONF:-}' +'input_field_group': '${VAR:-}' +'input_level_fcst': '${FCST_LEVEL:-}' +'input_thresh_fcst': '${FCST_THRESH:-}' +# +# Verification configuration dictionary. +# +'vx_config_dict': +${vx_config_dict:-} " # Render the template to create a METplus configuration file diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh index 33d00b1d37..924d321ec3 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh @@ -250,9 +250,12 @@ metplus_log_fn="metplus.log.${metplus_log_bn}" #----------------------------------------------------------------------- # det_or_ens="ens" -vx_config_output_fn="vx_config_${det_or_ens}.txt" -vx_config_output_fp="${EXPTDIR}/${vx_config_output_fn}" -vx_config_dict=$(<"${vx_config_output_fp}") +vx_config_fn="vx_config_${det_or_ens}.yaml" +vx_config_fp="${METPLUS_CONF}/${vx_config_fn}" +vx_config_dict=$(<"${vx_config_fp}") +# Indent each line of vx_config_dict so that it is aligned properly when +# included in the yaml-formatted variable "settings" below. +vx_config_dict=$( printf "%s\n" "${vx_config_dict}" | sed 's/^/ /' ) # #----------------------------------------------------------------------- # @@ -273,50 +276,54 @@ settings="\ # # MET/METplus information. # - 'metplus_tool_name': '${metplus_tool_name}' - 'MetplusToolName': '${MetplusToolName}' - 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' - 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' +'metplus_tool_name': '${metplus_tool_name}' +'MetplusToolName': '${MetplusToolName}' +'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' +'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' # # Date and forecast hour information. # - 'cdate': '$CDATE' - 'fhr_list': '${FHR_LIST}' +'cdate': '$CDATE' +'fhr_list': '${FHR_LIST}' # # Input and output directory/file information. 
# - 'metplus_config_fn': '${metplus_config_fn:-}' - 'metplus_log_fn': '${metplus_log_fn:-}' - 'obs_input_dir': '${OBS_INPUT_DIR:-}' - 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' - 'fcst_input_dir': '${FCST_INPUT_DIR:-}' - 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' - 'output_base': '${OUTPUT_BASE}' - 'output_dir': '${OUTPUT_DIR}' - 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' - 'staging_dir': '${STAGING_DIR}' - 'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' +'metplus_config_fn': '${metplus_config_fn:-}' +'metplus_log_fn': '${metplus_log_fn:-}' +'obs_input_dir': '${OBS_INPUT_DIR:-}' +'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' +'fcst_input_dir': '${FCST_INPUT_DIR:-}' +'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' +'output_base': '${OUTPUT_BASE}' +'output_dir': '${OUTPUT_DIR}' +'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' +'staging_dir': '${STAGING_DIR}' +'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' # # Ensemble and member-specific information. # - 'num_ens_members': '${NUM_ENS_MEMBERS}' - 'ensmem_name': '${ensmem_name:-}' - 'time_lag': '${time_lag:-}' +'num_ens_members': '${NUM_ENS_MEMBERS}' +'ensmem_name': '${ensmem_name:-}' +'time_lag': '${time_lag:-}' # # Field information. # - 'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' - 'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' - 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' - 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' - 'obtype': '${OBTYPE}' - 'accum_hh': '${ACCUM_HH:-}' - 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'metplus_templates_dir': '${METPLUS_CONF:-}' - 'input_field_group': '${VAR:-}' - 'input_level_fcst': '${FCST_LEVEL:-}' - 'input_thresh_fcst': '${FCST_THRESH:-}' - 'vx_config_dict': ${vx_config_dict:-} +'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' +'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' +'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' +'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' +'obtype': '${OBTYPE}' +'accum_hh': '${ACCUM_HH:-}' +'accum_no_pad': '${ACCUM_NO_PAD:-}' +'metplus_templates_dir': '${METPLUS_CONF:-}' +'input_field_group': '${VAR:-}' +'input_level_fcst': '${FCST_LEVEL:-}' +'input_thresh_fcst': '${FCST_THRESH:-}' +# +# Verification configuration dictionary. +# +'vx_config_dict': +${vx_config_dict:-} " # Render the template to create a METplus configuration file diff --git a/ush/metplus/decouple_fcst_obs_vx_config.py b/ush/metplus/decouple_fcst_obs_vx_config.py deleted file mode 100755 index afa001859c..0000000000 --- a/ush/metplus/decouple_fcst_obs_vx_config.py +++ /dev/null @@ -1,436 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -import glob -import argparse -import yaml - -import logging -import textwrap -from textwrap import indent, dedent - -import pprint -import subprocess - -from pathlib import Path -file = Path(__file__).resolve() -home_dir = file.parents[2] -ush_dir = Path(os.path.join(home_dir, 'ush')).resolve() -sys.path.append(str(ush_dir)) - -from python_utils import ( - log_info, - load_config_file, -) - - -def get_pprint_str(var, indent_str=''): - """ - Function to format a python variable as a pretty-printed string and add - indentation. - - Arguments: - --------- - var: - A variable. - - indent_str: - String to be added to the beginning of each line of the pretty-printed - form of var. This usually consists of multiple space characters. - - Returns: - ------- - var_str: - Formatted string containing contents of variable. 
-    """
-
-    var_str = pprint.pformat(var, compact=True, sort_dicts=False)
-    var_str = var_str.splitlines(True)
-    var_str = [indent_str + s for s in var_str]
-    var_str = ''.join(var_str)
-
-    return var_str
-
-
-def create_pprinted_msg(vars_dict, indent_str='', add_nl_after_varname=False):
-    """
-    Function to create an output message (string) containing one or more
-    variables' names, with each name followed possibly by a newline, an equal
-    sign, and the pretty-printed value of the variable. Each variable name
-    starts on a new line.
-
-    Arguments:
-    ---------
-    vars_dict:
-        Dictionary containing the variable names (the keys) and their values
-        (the values).
-
-    indent_str:
-        String to be added to the beginning of each line of the string before
-        returning it. This usually consists of multiple space characters.
-
-    add_nl_after_varname:
-        Flag indicating whether to add a newline after the variable name (and
-        before the equal sign).
-
-    Returns:
-    -------
-    vars_str:
-        Formatted string containing contents of variable.
-    """
-
-    space_or_nl = ' '
-    one_or_zero = 1
-    if add_nl_after_varname:
-        space_or_nl = '\n'
-        one_or_zero = 0
-
-    vars_str = ''
-    for var_name, var_value in vars_dict.items():
-        pprint_indent_str = ' '*(2 + one_or_zero*(1 + len(var_name)))
-        tmp = f'{var_name}' + space_or_nl + '= ' + \
-              get_pprint_str(var_value, pprint_indent_str).lstrip()
-        vars_str = '\n'.join([vars_str, tmp])
-
-    vars_str = indent(vars_str, indent_str)
-
-    return vars_str
-
-
-def extract_fcst_obs_vals_from_cpld(item_cpld):
-    """
-    Function to parse the "coupled" value of an item (obtained from the coupled
-    verification (vx) configuration dictionary) to extract from it the item's
-    value for forecasts and its value for observations. The coupled item
-    (item_cpld) is a string that may correspond to a field name, a level, or
-    a threshold. If item_cpld has the form
-
-        item_cpld = str1 + delim_str + str2
-
-    where delim_str is a delimiter string (e.g. delim_str may be set to '%%'),
-    then the forecast and observation values of the item are given by
-
-        item_fcst = str1
-        item_obs = str2
-
-    For example, if delim_str = '%%' and
-
-        item_cpld = 'ABCD%%EFGH'
-
-    then
-
-        item_fcst = 'ABCD'
-        item_obs = 'EFGH'
-
-    Alternatively, if delim_str is not a substring within item_cpld, both
-    return values will be identical to the input.
-
-    Arguments:
-    ---------
-    item_cpld:
-        String representing a "coupled" item (field name, level, or threshold)
-        containing both the item's forecast value and its observation value.
-
-    Returns:
-    -------
-    item_fcst, item_obs:
-        Strings containing the values of the item for forecasts and observations,
-        respectively.
-    """
-
-    # Set the delimiter string.
-    delim_str = '%%'
-
-    # Parse the string containing the coupled value of the item to extract
-    # its forecast and observation values.
-    if delim_str in item_cpld:
-        if item_cpld.count(delim_str) == 1:
-            item_fcst, item_obs = item_cpld.split(delim_str)
-        else:
-            msg = dedent(f"""
-                The delimiter string (delim_str) appears more than once in the current
-                coupled item value (item_cpld):
-                  delim_str = {get_pprint_str(delim_str)}
-                  item_cpld = {get_pprint_str(item_cpld)}
-                Stopping.
-                """)
-            logging.error(msg)
-            raise ValueError(msg)
-    else:
-        item_fcst = item_cpld
-        item_obs = item_cpld
-
-    return item_fcst, item_obs
-
-
-def decouple_fcst_obs_vx_config(vx_type, outfile_type, outdir='./', log_lvl='info', log_fp=''):
-    """
-    This function reads from a yaml configuration file the coupled verification
-    (vx) configuration dictionary and parses it (i.e. decouples its contents)
-    to produce two new configuration dictionaries -- one for forecasts and
-    another for observations. Here, by "coupled" dictionary, we mean one that
-    contains items (keys and values) that store the forecast and observation
-    values for various quantities (field names, levels, and thresholds) in
-    combined/coupled form. (See the documentation for the function
-    extract_fcst_obs_vals_from_cpld() for more details of this coupled form.)
-    This function then writes the two separate (decoupled) vx configuration
-    dictionaries (one for forecasts and the other for observations) to a file.
-
-    Arguments:
-    ---------
-    vx_type:
-        Type of verification for which the coupled dictionary to be read in
-        applies. This can be 'det' (for deterministic verification) or 'ens'
-        (for ensemble verification).
-    outfile_type:
-        Type of the output file. This can be 'txt' (for the output to be saved
-        in a pretty-printed text file) or 'yaml' (for the output to be saved in
-        a yaml-formatted file). Here, the "output" consists of the two separate
-        vx configuration files (one for forecasts and another for observations).
-    outdir:
-        The directory in which to save the output file.
-    log_lvl:
-        The logging level to use.
-    log_fp:
-        Path to the log file. Default is an empty string, so that logging output
-        is sent to stdout.
-
-    Returns:
-    -------
-    None
-    """
-
-    # Set up logging.
-    log_level = str.upper(log_lvl)
-    fmt = "[%(levelname)s:%(name)s: %(filename)s, line %(lineno)s: %(funcName)s()] %(message)s"
-    if log_fp:
-        logging.basicConfig(level=log_level, format=fmt, filename=log_fp, filemode='w')
-    else:
-        logging.basicConfig(level=log_level, format=fmt)
-        logging.basicConfig(level=log_level)
-
-    # Load the yaml file containing the coupled forecast-and-observations
-    # verification (vx) configuration dictionary.
-    metplus_conf_dir = Path(os.path.join(home_dir, 'parm', 'metplus')).resolve()
-    config_fn = ''.join(['vx_config_', vx_type, '.yaml'])
-    config_fp = Path(os.path.join(metplus_conf_dir, config_fn)).resolve()
-    fgs_fields_levels_threshes_cpld = load_config_file(config_fp)
-
-    msg = create_pprinted_msg(
-        vars_dict = {'fgs_fields_levels_threshes_cpld': fgs_fields_levels_threshes_cpld},
-        indent_str = ' '*0,
-        add_nl_after_varname = True)
-    logging.debug(msg)
-
-    # Loop through the field groups in the coupled vx configuration dictionary
-    # and generate two separate vx configuration dictionaries, one for forecasts
-    # and another for observations.
-    fgs_fields_levels_threshes_fcst = {}
-    fgs_fields_levels_threshes_obs = {}
-    indent_incr = 4
-    indent_size = indent_incr
-    indent_str = ' '*indent_size
-    for field_group, fields_levels_threshes_cpld in fgs_fields_levels_threshes_cpld.items():
-
-        msg = create_pprinted_msg(
-            vars_dict = {'field_group': field_group},
-            indent_str = indent_str)
-        logging.debug(msg)
-
-        # Loop over the field names associated with the current field group.
- # - # Note that the following variables have to be lists of dictionaries - # (where each dictionary contains only one key-value pair) instead of - # dictionaries because the field names might be repeated and thus cannot - # be used as dictionary keys. For example, in the ADPSFC field group, - # the forecast fields CRAIN, CSNOW, CFRZR, and CICEP all have the - # corresponding observation field PRWE but with different thresholds, - # so although fields_levels_threshes_fcst could be a dictionary with - # CRAIN, CSNOW, CFRZR, and CICEP as keys, fields_levels_threshes_obs - # cannot be a dictionary because the string PRWE cannot be used as a key - # more than once. - fields_levels_threshes_fcst = [] - fields_levels_threshes_obs = [] - indent_size += indent_incr - indent_str = ' '*indent_size - for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items(): - - msg = create_pprinted_msg( - vars_dict = {'field_cpld': field_cpld}, - indent_str = indent_str) - logging.debug(msg) - - # Parse the current coupled field name to extract the forecast and - # observation field names. - field_fcst, field_obs = extract_fcst_obs_vals_from_cpld(field_cpld) - - msg = create_pprinted_msg( - vars_dict = {'field_fcst': field_fcst, 'field_obs': field_obs}, - indent_str = indent_str) - logging.debug(msg) - - # Loop over the levels associated with the current field. - levels_threshes_fcst = {} - levels_threshes_obs = {} - indent_size += indent_incr - indent_str = ' '*indent_size - for level_cpld, threshes_cpld in levels_threshes_cpld.items(): - - msg = create_pprinted_msg( - vars_dict = {'level_cpld': level_cpld}, - indent_str = indent_str) - logging.debug(msg) - - # Parse the current coupled level to extract the forecast and observation - # levels. - level_fcst, level_obs = extract_fcst_obs_vals_from_cpld(level_cpld) - - msg = create_pprinted_msg( - vars_dict = {'level_fcst': level_fcst, 'level_obs': level_obs}, - indent_str = indent_str) - logging.debug(msg) - - # Loop over the thresholds associated with the current level. - threshes_fcst = [] - threshes_obs = [] - indent_size += indent_incr - indent_str = ' '*indent_size - for thresh_cpld in threshes_cpld: - - msg = create_pprinted_msg( - vars_dict = {'thresh_cpld': thresh_cpld}, - indent_str = indent_str) - logging.debug(msg) - - # Parse the current coupled threshold to extract the forecast and - # observation thresholds. 
- thresh_fcst, thresh_obs = extract_fcst_obs_vals_from_cpld(thresh_cpld) - - msg = create_pprinted_msg( - vars_dict = {'thresh_fcst': thresh_fcst, 'thresh_obs': thresh_obs}, - indent_str = indent_str) - logging.debug(msg) - - threshes_fcst.append(thresh_fcst) - threshes_obs.append(thresh_obs) - - indent_size -= indent_incr - indent_str = ' '*indent_size - msg = create_pprinted_msg( - vars_dict = {'threshes_fcst': threshes_fcst, - 'threshes_obs': threshes_obs}, - indent_str = indent_str, - add_nl_after_varname = True) - logging.debug(msg) - - levels_threshes_fcst[level_fcst] = threshes_fcst - levels_threshes_obs[level_obs] = threshes_obs - - indent_size -= indent_incr - indent_str = ' '*indent_size - msg = create_pprinted_msg( - vars_dict = {'levels_threshes_fcst': levels_threshes_fcst, - 'levels_threshes_obs': levels_threshes_obs}, - indent_str = indent_str, - add_nl_after_varname = True) - logging.debug(msg) - - fields_levels_threshes_fcst.append({field_fcst: levels_threshes_fcst}) - fields_levels_threshes_obs.append({field_obs: levels_threshes_obs}) - - indent_size -= indent_incr - indent_str = ' '*indent_size - msg = create_pprinted_msg( - vars_dict = {'fields_levels_threshes_fcst': fields_levels_threshes_fcst, - 'fields_levels_threshes_obs': fields_levels_threshes_obs}, - indent_str = indent_str, - add_nl_after_varname = True) - logging.debug(msg) - - fgs_fields_levels_threshes_fcst[field_group] = fields_levels_threshes_fcst - fgs_fields_levels_threshes_obs[field_group] = fields_levels_threshes_obs - - indent_size -= indent_incr - indent_str = ' '*indent_size - msg = create_pprinted_msg( - vars_dict = {'fgs_fields_levels_threshes_fcst': fgs_fields_levels_threshes_fcst, - 'fgs_fields_levels_threshes_obs': fgs_fields_levels_threshes_obs}, - indent_str = indent_str, - add_nl_after_varname = True) - logging.debug(msg) - - # We now have a verification configuration dictionary for forecasts and - # a separate one for the observations. To conveniently write these to a - # file, first place (wrap) them in a higher-level dictionary. - vx_config_dict = {'fcst': fgs_fields_levels_threshes_fcst, - 'obs': fgs_fields_levels_threshes_obs} - - # Write the contents of the higher-level dictionary to file. - output_fn = ''.join(['vx_config_', vx_type, '.', outfile_type]) - output_fp = Path(os.path.join(outdir, output_fn)).resolve() - with open(f'{output_fp}', 'w') as fn: - if outfile_type == 'txt': - dict_to_str = get_pprint_str(vx_config_dict, ' ') - fn.write(dict_to_str) - elif outfile_type == 'yaml': - yaml_vars = yaml.dump(vx_config_dict, fn) - - return None -# -# ----------------------------------------------------------------------- -# -# Call the function defined above. -# -# ----------------------------------------------------------------------- -# -if __name__ == "__main__": - - parser = argparse.ArgumentParser( - description='Read in and process verification configuration file' - ) - - default_vx_type = 'det' - parser.add_argument('--vx_type', - type=str, - required=True, - choices=['det', 'ens'], - default=default_vx_type, - help=dedent(f""" - String that determines whether to read in the deterministic or ensemble - verification configuration file. - """)) - - default_outfile_type = 'txt' - parser.add_argument('--outfile_type', - type=str, - required=True, - choices=['txt', 'yaml'], - default=default_outfile_type, - help=dedent(f""" - Type of output file. The output consists of a high-level dictionary - containing two keys: 'fcst' and 'obs'. 
The value of 'fcst' is the vx
-        configuration dictionary for forecasts, and the value of 'obs' is the vx
-        dictionary for observations. If outfile_type is set to 'txt', this high-
-        level dictionary is saved to a text file in a form that can be read in by
-        the SRW App's ex-scripts for the verification tasks. In particular, this
-        form contains the curly braces and brackets that define dictionaries and
-        lists in python code (but that would normally not appear in a yaml file).
-        If outfile_type is set to 'yaml', then the high-level dictionary is saved
-        to a yaml-formatted file.
-        """))
-
-    parser.add_argument('--outdir',
-                        type=str,
-                        required=False,
-                        default='./',
-                        help=dedent(f"""
-        Directory in which to place the output file containing the decoupled
-        (i.e. with forecast and observation information placed in separate data
-        structures) verification configuration information.
-        """))
-
-    args = parser.parse_args()
-
-    decouple_fcst_obs_vx_config(vx_type=args.vx_type, outfile_type=args.outfile_type, outdir=args.outdir)
-

From 59c78fb14736e9f69938bfa2d74ded6f0f227832 Mon Sep 17 00:00:00 2001
From: EdwardSnyder-NOAA <96196752+EdwardSnyder-NOAA@users.noreply.github.com>
Date: Wed, 15 May 2024 08:57:11 -0500
Subject: [PATCH 25/42] [develop] Add the remaining UFS Case Studies (#1081)

Add the remaining UFS Case Studies to the SRW App as WE2E tests. These new
tests were added to the comprehensive and coverage files as well.

---
 tests/WE2E/machine_suites/comprehensive | 5 +++
 .../WE2E/machine_suites/comprehensive.derecho | 5 +++
 .../machine_suites/comprehensive.noaacloud | 5 +++
 tests/WE2E/machine_suites/comprehensive.orion | 5 +++
 tests/WE2E/machine_suites/coverage.derecho | 2 +
 tests/WE2E/machine_suites/coverage.gaea | 1 +
 .../WE2E/machine_suites/coverage.hera.gnu.com | 1 +
 tests/WE2E/machine_suites/coverage.hercules | 1 +
 .../config.2019_hurricane_lorenzo.yaml | 38 +++++++++++++++++++
 .../config.2019_memorial_day_heat_wave.yaml | 36 ++++++++++++++++++
 ...onfig.2020_denver_radiation_inversion.yaml | 38 +++++++++++++++++++
 .../config.2020_easter_storm.yaml | 38 +++++++++++++++++++
 .../config.2020_jan_cold_blast.yaml | 38 +++++++++++++++++++
 13 files changed, 213 insertions(+)
 create mode 100644 tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml
 create mode 100644 tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml
 create mode 100644 tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml
 create mode 100644 tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml
 create mode 100644 tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml

diff --git a/tests/WE2E/machine_suites/comprehensive b/tests/WE2E/machine_suites/comprehensive
index 3af6ae0db4..8c546918a0 100644
--- a/tests/WE2E/machine_suites/comprehensive
+++ b/tests/WE2E/machine_suites/comprehensive
@@ -2,6 +2,11 @@
 2020_CAPE
 2019_hurricane_barry
 2019_halloween_storm
+2019_hurricane_lorenzo
+2019_memorial_day_heat_wave
+2020_denver_radiation_inversion
+2020_easter_storm
+2020_jan_cold_blast
 community
 custom_ESGgrid
 custom_ESGgrid_Central_Asia_3km
diff --git a/tests/WE2E/machine_suites/comprehensive.derecho b/tests/WE2E/machine_suites/comprehensive.derecho
index 9ce8d067ac..a28718a10a 100644
--- a/tests/WE2E/machine_suites/comprehensive.derecho
+++ b/tests/WE2E/machine_suites/comprehensive.derecho
@@ -2,6 +2,11 @@
 2020_CAPE
 2019_hurricane_barry
 2019_halloween_storm
+2019_hurricane_lorenzo
+2019_memorial_day_heat_wave
+2020_denver_radiation_inversion +2020_easter_storm +2020_jan_cold_blast community custom_ESGgrid #custom_ESGgrid_Central_Asia_3km diff --git a/tests/WE2E/machine_suites/comprehensive.noaacloud b/tests/WE2E/machine_suites/comprehensive.noaacloud index 23c0aa8456..6c01bd70a8 100644 --- a/tests/WE2E/machine_suites/comprehensive.noaacloud +++ b/tests/WE2E/machine_suites/comprehensive.noaacloud @@ -56,6 +56,11 @@ specify_template_filenames 2020_CAPE 2019_hurricane_barry 2019_halloween_storm +2019_hurricane_lorenzo +2019_memorial_day_heat_wave +2020_denver_radiation_inversion +2020_easter_storm +2020_jan_cold_blast get_from_AWS_ics_GEFS_lbcs_GEFS_fmt_grib2_2022040400_ensemble_2mems get_from_NOMADS_ics_FV3GFS_lbcs_FV3GFS long_fcst diff --git a/tests/WE2E/machine_suites/comprehensive.orion b/tests/WE2E/machine_suites/comprehensive.orion index 739b4fff8e..ce71fe05db 100644 --- a/tests/WE2E/machine_suites/comprehensive.orion +++ b/tests/WE2E/machine_suites/comprehensive.orion @@ -2,6 +2,11 @@ 2020_CAPE 2019_hurricane_barry 2019_halloween_storm +2019_hurricane_lorenzo +2019_memorial_day_heat_wave +2020_denver_radiation_inversion +2020_easter_storm +2020_jan_cold_blast community custom_ESGgrid custom_ESGgrid_Central_Asia_3km diff --git a/tests/WE2E/machine_suites/coverage.derecho b/tests/WE2E/machine_suites/coverage.derecho index c2a770672e..a948c76033 100644 --- a/tests/WE2E/machine_suites/coverage.derecho +++ b/tests/WE2E/machine_suites/coverage.derecho @@ -7,3 +7,5 @@ grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR pregen_grid_orog_sfc_climo specify_template_filenames 2019_hurricane_barry +2019_memorial_day_heat_wave +2020_denver_radiation_inversion diff --git a/tests/WE2E/machine_suites/coverage.gaea b/tests/WE2E/machine_suites/coverage.gaea index e6aba6ea3d..970fdf4086 100644 --- a/tests/WE2E/machine_suites/coverage.gaea +++ b/tests/WE2E/machine_suites/coverage.gaea @@ -8,3 +8,4 @@ grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot 2020_CAPE +2020_easter_storm diff --git a/tests/WE2E/machine_suites/coverage.hera.gnu.com b/tests/WE2E/machine_suites/coverage.hera.gnu.com index 4c802781f9..c2018a6e78 100644 --- a/tests/WE2E/machine_suites/coverage.hera.gnu.com +++ b/tests/WE2E/machine_suites/coverage.hera.gnu.com @@ -8,3 +8,4 @@ long_fcst MET_verification_only_vx MET_ensemble_verification_only_vx_time_lag 2019_halloween_storm +2020_jan_cold_blast diff --git a/tests/WE2E/machine_suites/coverage.hercules b/tests/WE2E/machine_suites/coverage.hercules index 273de3108e..ec37d81a56 100644 --- a/tests/WE2E/machine_suites/coverage.hercules +++ b/tests/WE2E/machine_suites/coverage.hercules @@ -9,3 +9,4 @@ grid_RRFS_NA_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RAP grid_SUBCONUS_Ind_3km_ics_NAM_lbcs_NAM_suite_GFS_v16 MET_verification_only_vx specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS +2019_hurricane_lorenzo diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml new file mode 100644 index 0000000000..557607d810 --- /dev/null +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml @@ -0,0 +1,38 @@ +metadata: + description: |- + This test is to ensure that the workflow running in community mode + completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16 + physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms + for 2019 
Hurricane Lorenzo. + # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to + # download these files, which can delay the WE2E testing process. + # To capture the event, extend the FCST_LEN_HRS from 6 to 90. +user: + RUN_ENVIR: community +platform: + EXTRN_MDL_DATA_STORES: aws +workflow: + CCPP_PHYS_SUITE: FV3_GFS_v16 + PREDEF_GRID_NAME: RRFS_CONUS_13km + DATE_FIRST_CYCL: '2019092512' + DATE_LAST_CYCL: '2019092512' + FCST_LEN_HRS: 6 + PREEXISTING_DIR_METHOD: rename +task_get_extrn_ics: + EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY + FV3GFS_FILE_FMT_ICS: nemsio +task_get_extrn_lbcs: + EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY + LBC_SPEC_INTVL_HRS: 3 + FV3GFS_FILE_FMT_LBCS: nemsio +rocoto: + tasks: + task_get_extrn_ics: + walltime: 06:00:00 + task_get_extrn_lbcs: + walltime: 06:00:00 + metatask_run_ensemble: + task_make_lbcs_mem#mem#: + walltime: 06:00:00 + task_run_fcst_mem#mem#: + walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml new file mode 100644 index 0000000000..fcba9c7924 --- /dev/null +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml @@ -0,0 +1,36 @@ +metadata: + description: |- + This test is to ensure that the workflow running in community mode + completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16 + physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms + for 2019 Memorial Day Heat Wave. + # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to + # download these files, which can delay the WE2E testing process. + # To capture the event, extend the FCST_LEN_HRS from 6 to 24. +user: + RUN_ENVIR: community +platform: + EXTRN_MDL_DATA_STORES: aws +workflow: + CCPP_PHYS_SUITE: FV3_GFS_v16 + PREDEF_GRID_NAME: RRFS_CONUS_13km + DATE_FIRST_CYCL: '2019052300' + DATE_LAST_CYCL: '2019052300' + FCST_LEN_HRS: 6 + PREEXISTING_DIR_METHOD: rename +task_get_extrn_ics: + EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY + FV3GFS_FILE_FMT_ICS: nemsio +task_get_extrn_lbcs: + EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY + LBC_SPEC_INTVL_HRS: 6 + FV3GFS_FILE_FMT_LBCS: nemsio +rocoto: + tasks: + task_get_extrn_ics: + walltime: 06:00:00 + task_get_extrn_lbcs: + walltime: 06:00:00 + metatask_run_ensemble: + task_make_lbcs_mem#mem#: + walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml new file mode 100644 index 0000000000..8bf5ece9ee --- /dev/null +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml @@ -0,0 +1,38 @@ +metadata: + description: |- + This test is to ensure that the workflow running in community mode + completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16 + physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms + for 2020 Denver Radiation Inversion. + # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to + # download these files, which can delay the WE2E testing process. + # To capture the event, extend the FCST_LEN_HRS from 6 to 90. 
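+  # As a sketch, a hypothetical full-event run would change only the line
+  #   FCST_LEN_HRS: 90
+  # in the workflow section below; all other settings stay the same.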
+user: + RUN_ENVIR: community +platform: + EXTRN_MDL_DATA_STORES: aws +workflow: + CCPP_PHYS_SUITE: FV3_GFS_v16 + PREDEF_GRID_NAME: RRFS_CONUS_13km + DATE_FIRST_CYCL: '2020042912' + DATE_LAST_CYCL: '2020042912' + FCST_LEN_HRS: 6 + PREEXISTING_DIR_METHOD: rename +task_get_extrn_ics: + EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY + FV3GFS_FILE_FMT_ICS: nemsio +task_get_extrn_lbcs: + EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY + LBC_SPEC_INTVL_HRS: 3 + FV3GFS_FILE_FMT_LBCS: nemsio +rocoto: + tasks: + task_get_extrn_ics: + walltime: 06:00:00 + task_get_extrn_lbcs: + walltime: 06:00:00 + metatask_run_ensemble: + task_make_lbcs_mem#mem#: + walltime: 06:00:00 + task_run_fcst_mem#mem#: + walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml new file mode 100644 index 0000000000..3c619c06bb --- /dev/null +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml @@ -0,0 +1,38 @@ +metadata: + description: |- + This test is to ensure that the workflow running in community mode + completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16 + physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms + for 2020 Easter Sunday Storm. + # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to + # download these files, which can delay the WE2E testing process. + # To capture the event, extend the FCST_LEN_HRS from 6 to 90. +user: + RUN_ENVIR: community +platform: + EXTRN_MDL_DATA_STORES: aws +workflow: + CCPP_PHYS_SUITE: FV3_GFS_v16 + PREDEF_GRID_NAME: RRFS_CONUS_13km + DATE_FIRST_CYCL: '2020040912' + DATE_LAST_CYCL: '2020040912' + FCST_LEN_HRS: 6 + PREEXISTING_DIR_METHOD: rename +task_get_extrn_ics: + EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY + FV3GFS_FILE_FMT_ICS: nemsio +task_get_extrn_lbcs: + EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY + LBC_SPEC_INTVL_HRS: 3 + FV3GFS_FILE_FMT_LBCS: nemsio +rocoto: + tasks: + task_get_extrn_ics: + walltime: 06:00:00 + task_get_extrn_lbcs: + walltime: 06:00:00 + metatask_run_ensemble: + task_make_lbcs_mem#mem#: + walltime: 06:00:00 + task_run_fcst_mem#mem#: + walltime: 06:00:00 diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml new file mode 100644 index 0000000000..6121228cb8 --- /dev/null +++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml @@ -0,0 +1,38 @@ +metadata: + description: |- + This test is to ensure that the workflow running in community mode + completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16 + physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms + for 2020 January Cold Blast. + # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to + # download these files, which can delay the WE2E testing process. + # To capture the event, extend the FCST_LEN_HRS from 6 to 90. 
+user: + RUN_ENVIR: community +platform: + EXTRN_MDL_DATA_STORES: aws +workflow: + CCPP_PHYS_SUITE: FV3_GFS_v16 + PREDEF_GRID_NAME: RRFS_CONUS_13km + DATE_FIRST_CYCL: '2020011812' + DATE_LAST_CYCL: '2020011812' + FCST_LEN_HRS: 6 + PREEXISTING_DIR_METHOD: rename +task_get_extrn_ics: + EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY + FV3GFS_FILE_FMT_ICS: nemsio +task_get_extrn_lbcs: + EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY + LBC_SPEC_INTVL_HRS: 3 + FV3GFS_FILE_FMT_LBCS: nemsio +rocoto: + tasks: + task_get_extrn_ics: + walltime: 06:00:00 + task_get_extrn_lbcs: + walltime: 06:00:00 + metatask_run_ensemble: + task_make_lbcs_mem#mem#: + walltime: 06:00:00 + task_run_fcst_mem#mem#: + walltime: 06:00:00 From 6ddf61b371fd3952cd6164fca1dad8432ed11dfe Mon Sep 17 00:00:00 2001 From: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com> Date: Wed, 15 May 2024 14:34:41 -0400 Subject: [PATCH 26/42] [develop] Update WM and UPP hashes (#1083) * Update weather model to 26cb9e6 from May 2 and UPP to 5faac75 from April 9 * Increase walltime from 1 hour to 2 hours for the grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0 WE2E test configuration --- Externals.cfg | 4 ++-- ...SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index c76f7d8845..6a05d66e94 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -12,7 +12,7 @@ protocol = git repo_url = https://github.com/ufs-community/ufs-weather-model # Specify either a branch name or a hash but not both. #branch = develop -hash = 4f32a4b +hash = 26cb9e6 local_path = sorc/ufs-weather-model required = True @@ -21,7 +21,7 @@ protocol = git repo_url = https://github.com/NOAA-EMC/UPP # Specify either a branch name or a hash but not both. #branch = develop -hash = 945cb2c +hash = 5faac75 local_path = sorc/UPP required = True diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml index 120a38291e..0d850b0147 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml @@ -19,7 +19,7 @@ rocoto: taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/test.yaml"]|include }}' metatask_run_ensemble: task_run_fcst_mem#mem#: - walltime: 01:00:00 + walltime: 02:00:00 task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: grib2 From 28cbbc8cae87f6147f346cd9b94c11aca9c02e37 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 May 2024 10:17:29 -0400 Subject: [PATCH 27/42] [develop] Bump requests from 2.31.0 to 2.32.0 in /doc (#1085) updated-dependencies: - dependency-name: requests dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- doc/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/requirements.txt b/doc/requirements.txt index a2f32cd83f..f4dbdfb2a9 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -40,7 +40,7 @@ pygments==2.17.2 # via sphinx pyyaml==6.0.1 # via pybtex -requests==2.31.0 +requests==2.32.0 # via sphinx six==1.16.0 # via From 51f4981943337ea54277f4412834ff995b30806a Mon Sep 17 00:00:00 2001 From: Bruce Kropp - Raytheon <104453151+BruceKropp-Raytheon@users.noreply.github.com> Date: Fri, 31 May 2024 05:50:32 -0700 Subject: [PATCH 28/42] [develop] fix CI scripts to save logfile names that Jenkinsfile needs for pwcloud platform builds (#1087) Make sure the log file names match what Jenkinsfile needs, specifically for PW cloud platforms - Azure, AWS, GCP --- .cicd/scripts/srw_build.sh | 2 +- .cicd/scripts/srw_test.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.cicd/scripts/srw_build.sh b/.cicd/scripts/srw_build.sh index 4733c4a4ca..25546561eb 100755 --- a/.cicd/scripts/srw_build.sh +++ b/.cicd/scripts/srw_build.sh @@ -36,6 +36,6 @@ cd - # Create combined log file for upload to s3 build_dir="${workspace}/build_${SRW_COMPILER}" cat ${build_dir}/log.cmake ${build_dir}/log.make \ - >${build_dir}/srw_build-${platform}-${SRW_COMPILER}.txt + >${build_dir}/srw_build-${SRW_PLATFORM}-${SRW_COMPILER}.txt exit $build_exit diff --git a/.cicd/scripts/srw_test.sh b/.cicd/scripts/srw_test.sh index 8ed4756987..90273f2730 100755 --- a/.cicd/scripts/srw_test.sh +++ b/.cicd/scripts/srw_test.sh @@ -45,7 +45,7 @@ fi cd ${we2e_test_dir} # Progress file -progress_file="${workspace}/we2e_test_results-${platform}-${SRW_COMPILER}.txt" +progress_file="${workspace}/we2e_test_results-${SRW_PLATFORM}-${SRW_COMPILER}.txt" /usr/bin/time -p -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' -o ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_test.json \ ./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \ --expt_basedir=${we2e_experiment_base_dir} | tee ${progress_file}; \ From 4c2cedea792b6f37ac22923d7f8d2844ea0ba7c3 Mon Sep 17 00:00:00 2001 From: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com> Date: Wed, 5 Jun 2024 08:54:55 -0400 Subject: [PATCH 29/42] [develop] Update WM hash to 1c6b4d4 (May 16) and UPP hash to be0410e (April 23) (#1086) * Updated the UFS-WM hash to 1c6b4d4 (May 16) and the UPP hash to be0410e (April 23). * Increased walltime from 01:00:00 to 02:30:00 for the custom_ESGgrid_SF_1p1km WE2E test to allow it to properly run on Hera using executables built with GNU compilers. --- Externals.cfg | 4 ++-- .../custom_grids/config.custom_ESGgrid_SF_1p1km.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index 6a05d66e94..9b2b544ffd 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -12,7 +12,7 @@ protocol = git repo_url = https://github.com/ufs-community/ufs-weather-model # Specify either a branch name or a hash but not both. 
#branch = develop -hash = 26cb9e6 +hash = 1c6b4d4 local_path = sorc/ufs-weather-model required = True @@ -21,7 +21,7 @@ protocol = git repo_url = https://github.com/NOAA-EMC/UPP # Specify either a branch name or a hash but not both. #branch = develop -hash = 5faac75 +hash = be0410e local_path = sorc/UPP required = True diff --git a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml index 6d9e2e0d6d..867b4675a0 100644 --- a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml +++ b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml @@ -57,7 +57,7 @@ rocoto: tasks: metatask_run_ensemble: task_run_fcst_mem#mem#: - walltime: 01:00:00 + walltime: 02:30:00 task_make_ics_mem#mem#: nnodes: 16 ppn: 12 From 81be59e608c130b2c488356097db539ee1523bb0 Mon Sep 17 00:00:00 2001 From: "Chan-Hoo.Jeon-NOAA" <60152248+chan-hoo@users.noreply.github.com> Date: Fri, 7 Jun 2024 08:53:51 -0400 Subject: [PATCH 30/42] [SRW-AQM] Port SRW-AQM to Derecho (#1090) * Port SRW-AQM to Derecho --------- Co-authored-by: Chan-Hoo Jeon Co-authored-by: Chan-Hoo Jeon Co-authored-by: Chan-Hoo Jeon Co-authored-by: Chan-Hoo Jeon --- Externals.cfg | 4 ++-- .../CustomizingTheWorkflow/ConfigWorkflow.rst | 10 ++++++++-- modulefiles/tasks/derecho/aqm_ics.local.lua | 2 +- modulefiles/tasks/derecho/aqm_lbcs.local.lua | 2 +- modulefiles/tasks/derecho/fire_emission.local.lua | 1 - .../tasks/derecho/nexus_emission.local.lua | 4 +--- modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua | 1 - .../tasks/derecho/nexus_post_split.local.lua | 4 +--- modulefiles/tasks/derecho/point_source.local.lua | 1 - modulefiles/tasks/derecho/pre_post_stat.local.lua | 2 +- modulefiles/wflow_derecho.lua | 2 -- ush/config.aqm.yaml | 2 +- ush/config_defaults.yaml | 15 +++++++++++---- ush/machine/derecho.yaml | 10 ++++++++-- 14 files changed, 35 insertions(+), 25 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index 9b2b544ffd..25ec5f79b9 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -30,7 +30,7 @@ protocol = git repo_url = https://github.com/noaa-oar-arl/NEXUS # Specify either a branch name or a hash but not both. #branch = develop -hash = 40346b6 +hash = e153072 local_path = sorc/arl_nexus required = True @@ -39,7 +39,7 @@ protocol = git repo_url = https://github.com/NOAA-EMC/AQM-utils # Specify either a branch name or a hash but not both. #branch = develop -hash = d953bd1 +hash = e236acd local_path = sorc/AQM-utils required = True diff --git a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst index 3bfa5bdf7d..960275d2bb 100644 --- a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst @@ -156,8 +156,8 @@ These settings define platform-specific run commands. Users should set run comma ``RUN_CMD_SERIAL``: (Default: "") The run command for some serial jobs. -``RUN_CMD_AQM``: (Default: "") - The run command for some AQM tasks. +``RUN_CMD_NEXUS``: (Default: "") + The run command for the AQM NEXUS tasks. ``RUN_CMD_AQMLBC``: (Default: "") The run command for the ``aqm_lbcs`` task. @@ -271,6 +271,12 @@ These parameters are associated with the fixed (i.e., static) files. On :srw-wik ``FIXshp``: (Default: "") System directory containing the graphics shapefiles. On Level 1 systems, these are set within the machine files. 
Users on other systems will need to provide the path to the directory that contains the *Natural Earth* shapefiles. +``FIXaqm``: (Default: "") + Path to system directory containing AQM fixed files. + +``FIXemis``: (Default: "") + Path to system directory containing AQM emission data files. + ``FIXcrtm``: (Default: "") Path to system directory containing CRTM fixed files. diff --git a/modulefiles/tasks/derecho/aqm_ics.local.lua b/modulefiles/tasks/derecho/aqm_ics.local.lua index 30f1157fbb..9b519c10f6 100644 --- a/modulefiles/tasks/derecho/aqm_ics.local.lua +++ b/modulefiles/tasks/derecho/aqm_ics.local.lua @@ -1,2 +1,2 @@ -load("nco/5.0.6") +load("nco/5.1.9") load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/aqm_lbcs.local.lua b/modulefiles/tasks/derecho/aqm_lbcs.local.lua index 30f1157fbb..9b519c10f6 100644 --- a/modulefiles/tasks/derecho/aqm_lbcs.local.lua +++ b/modulefiles/tasks/derecho/aqm_lbcs.local.lua @@ -1,2 +1,2 @@ -load("nco/5.0.6") +load("nco/5.1.9") load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/fire_emission.local.lua b/modulefiles/tasks/derecho/fire_emission.local.lua index 86252a9a4f..df0e35d5da 100644 --- a/modulefiles/tasks/derecho/fire_emission.local.lua +++ b/modulefiles/tasks/derecho/fire_emission.local.lua @@ -1,2 +1 @@ -load("ncarenv") load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/nexus_emission.local.lua b/modulefiles/tasks/derecho/nexus_emission.local.lua index e7f216375c..9b519c10f6 100644 --- a/modulefiles/tasks/derecho/nexus_emission.local.lua +++ b/modulefiles/tasks/derecho/nexus_emission.local.lua @@ -1,4 +1,2 @@ -load("nco/5.0.6") - -load("ncarenv") +load("nco/5.1.9") load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua b/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua index 86252a9a4f..df0e35d5da 100644 --- a/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua +++ b/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua @@ -1,2 +1 @@ -load("ncarenv") load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/nexus_post_split.local.lua b/modulefiles/tasks/derecho/nexus_post_split.local.lua index e7f216375c..9b519c10f6 100644 --- a/modulefiles/tasks/derecho/nexus_post_split.local.lua +++ b/modulefiles/tasks/derecho/nexus_post_split.local.lua @@ -1,4 +1,2 @@ -load("nco/5.0.6") - -load("ncarenv") +load("nco/5.1.9") load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/point_source.local.lua b/modulefiles/tasks/derecho/point_source.local.lua index 86252a9a4f..df0e35d5da 100644 --- a/modulefiles/tasks/derecho/point_source.local.lua +++ b/modulefiles/tasks/derecho/point_source.local.lua @@ -1,2 +1 @@ -load("ncarenv") load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/pre_post_stat.local.lua b/modulefiles/tasks/derecho/pre_post_stat.local.lua index 30f1157fbb..9b519c10f6 100644 --- a/modulefiles/tasks/derecho/pre_post_stat.local.lua +++ b/modulefiles/tasks/derecho/pre_post_stat.local.lua @@ -1,2 +1,2 @@ -load("nco/5.0.6") +load("nco/5.1.9") load("python_srw_aqm") diff --git a/modulefiles/wflow_derecho.lua b/modulefiles/wflow_derecho.lua index d9a3e24e2f..28bc7ec2f6 100644 --- a/modulefiles/wflow_derecho.lua +++ b/modulefiles/wflow_derecho.lua @@ -5,8 +5,6 @@ on the CISL machine Derecho (Cray) whatis([===[Loads libraries for running the UFS SRW Workflow on Derecho ]===]) -load("ncarenv") - append_path("MODULEPATH","/glade/work/epicufsrt/contrib/derecho/rocoto/modulefiles") load("rocoto") diff --git a/ush/config.aqm.yaml b/ush/config.aqm.yaml index 155f846add..21a73591ee 100644 
--- a/ush/config.aqm.yaml +++ b/ush/config.aqm.yaml @@ -24,7 +24,7 @@ workflow: COLDSTART: false # set to true for cold start WARMSTART_CYCLE_DIR: '/scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/restart/2023111000' # for hera # WARMSTART_CYCLE_DIR: '/work/noaa/epic/SRW-AQM_DATA/aqm_data/restart/2023111000' # for orion/hercules -# WARMSTART_CYCLE_DIR: '' # for derecho +# WARMSTART_CYCLE_DIR: '/glade/work/chanhooj/SRW-AQM_DATA/aqm_data/restart/2023111000' # for derecho nco: envir_default: test_aqm_warmstart NET_default: aqm diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 6a403754cb..c9c0fc7cb8 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -245,8 +245,8 @@ platform: # RUN_CMD_SERIAL: # The run command for some serial jobs # - # RUN_CMD_AQM: - # The run command for some AQM tasks. + # RUN_CMD_NEXUS: + # The run command for the AQM NEXUS tasks. # # RUN_CMD_AQMLBC: # The run command for the AQM_LBCS task. @@ -258,9 +258,8 @@ platform: RUN_CMD_FCST: "" RUN_CMD_POST: "" RUN_CMD_PRDGEN: "" - RUN_CMD_AQM: "" + RUN_CMD_NEXUS: "" RUN_CMD_AQMLBC: "" - # #----------------------------------------------------------------------- # @@ -421,6 +420,12 @@ platform: # FIXshp: # System directory where the graphics shapefiles are located. # + # FIXaqm: + # System directory where AQM data files are located + # + # FIXemis: + # System directory where AQM emission data files are located. + # # FIXcrtm: # System directory where CRTM fixed files are located # @@ -435,6 +440,8 @@ platform: FIXorg: "" FIXsfc: "" FIXshp: "" + FIXaqm: "" + FIXemis: "" FIXcrtm: "" FIXcrtmupp: "" # diff --git a/ush/machine/derecho.yaml b/ush/machine/derecho.yaml index b12e65513c..8bc768732f 100644 --- a/ush/machine/derecho.yaml +++ b/ush/machine/derecho.yaml @@ -15,8 +15,8 @@ platform: RUN_CMD_PRDGEN: mpiexec -n $nprocs RUN_CMD_SERIAL: time RUN_CMD_UTILS: mpiexec -n $nprocs - RUN_CMD_NEXUS: mpiexec -n $nprocs - RUN_CMD_AQMLBC: mpiexec -n ${numts} + RUN_CMD_NEXUS: /opt/cray/pe/pals/1.2.11/bin/mpiexec -n $nprocs + RUN_CMD_AQMLBC: /opt/cray/pe/pals/1.2.11/bin/mpiexec -n ${numts} PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' TEST_EXTRN_MDL_SOURCE_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data TEST_AQM_INPUT_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/aqm_data @@ -31,6 +31,8 @@ platform: FIXorg: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_orog FIXsfc: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_sfc_climo FIXshp: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/NaturalEarth + FIXaqm: /glade/work/chanhooj/SRW-AQM_DATA/fix_aqm + FIXemis: /glade/work/chanhooj/SRW-AQM_DATA/fix_emis EXTRN_MDL_DATA_STORES: aws data: ics_lbcs: @@ -42,3 +44,7 @@ data: HRRR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh} RAP: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} GSMGFS: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh} +cpl_aqm_parm: + COMINfire_default: /glade/work/chanhooj/SRW-AQM_DATA/aqm_data/RAVE_fire + COMINgefs_default: /glade/work/chanhooj/SRW-AQM_DATA/aqm_data/GEFS_DATA + NEXUS_GFS_SFC_DIR: /glade/work/chanhooj/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA From dbfed17f6c89b52196bb9cf8fab0873e65989fb4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Jun 2024 10:13:44 -0400 Subject: [PATCH 31/42] Bump urllib3 from 2.2.0 to 2.2.2 in /doc (#1097) 
Bumps [urllib3](https://github.com/urllib3/urllib3) from 2.2.0 to 2.2.2. - [Release notes](https://github.com/urllib3/urllib3/releases) - [Changelog](https://github.com/urllib3/urllib3/blob/main/CHANGES.rst) - [Commits](https://github.com/urllib3/urllib3/compare/2.2.0...2.2.2) --- updated-dependencies: - dependency-name: urllib3 dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- doc/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/requirements.txt b/doc/requirements.txt index f4dbdfb2a9..90efd3211e 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -72,5 +72,5 @@ sphinxcontrib-qthelp==1.0.7 # via sphinx sphinxcontrib-serializinghtml==1.1.10 # via sphinx -urllib3==2.2.0 +urllib3==2.2.2 # via requests From 94dc192a033a5e5752f43a2d898aca2b2f0c3b56 Mon Sep 17 00:00:00 2001 From: RatkoVasic-NOAA <37597874+RatkoVasic-NOAA@users.noreply.github.com> Date: Fri, 21 Jun 2024 10:59:31 -0400 Subject: [PATCH 32/42] [develop] Upgrade SRW to spack-stack 1.6.0 from 1.5.1 (#1093) Since the ufs-weather-model was upgraded to spack-stack 1.6.0, the SRW App has been upgraded as well. --------- Co-authored-by: EdwardSnyder-NOAA --- modulefiles/build_derecho_intel.lua | 6 ++--- modulefiles/build_gaea_intel.lua | 4 ++-- modulefiles/build_hera_gnu.lua | 23 +++++++++++-------- modulefiles/build_hera_intel.lua | 7 +++--- modulefiles/build_hercules_intel.lua | 9 ++++---- modulefiles/build_jet_intel.lua | 5 ++-- modulefiles/build_noaacloud_intel.lua | 2 +- modulefiles/build_orion_intel.lua | 19 +++++++-------- modulefiles/srw_common.lua | 12 +++++----- .../tasks/noaacloud/plot_allvars.local.lua | 7 ++---- modulefiles/tasks/noaacloud/python_srw.lua | 5 ++++ modulefiles/tasks/noaacloud/run_vx.local.lua | 5 ++++ modulefiles/tasks/orion/run_vx.local.lua | 6 +---- modulefiles/wflow_noaacloud.lua | 10 ++++---- modulefiles/wflow_orion.lua | 5 ++-- 15 files changed, 65 insertions(+), 60 deletions(-) diff --git a/modulefiles/build_derecho_intel.lua b/modulefiles/build_derecho_intel.lua index e057c9e5dc..491a94f912 100644 --- a/modulefiles/build_derecho_intel.lua +++ b/modulefiles/build_derecho_intel.lua @@ -6,15 +6,15 @@ the CISL machine Derecho (Cray) using Intel@2021.10.0 whatis([===[Loads libraries needed for building the UFS SRW App on Derecho ]===]) prepend_path("MODULEPATH","/lustre/desc1/scratch/epicufsrt/contrib/modulefiles_extra") -prepend_path("MODULEPATH", "/glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core") +prepend_path("MODULEPATH", "/glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core") load(pathJoin("stack-intel", os.getenv("stack_intel_ver") or "2021.10.0")) load(pathJoin("stack-cray-mpich", os.getenv("stack_cray_mpich_ver") or "8.1.25")) -load(pathJoin("cmake", os.getenv("cmake_ver") or "3.26.3")) +load(pathJoin("cmake", os.getenv("cmake_ver") or "3.23.1")) load("srw_common") -load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2")) +load(pathJoin("prod_util", os.getenv("prod_util_ver") or "2.1.1")) setenv("CMAKE_Platform","derecho.intel") diff --git a/modulefiles/build_gaea_intel.lua b/modulefiles/build_gaea_intel.lua index 0eca20b5e1..b47209194c 100644 --- a/modulefiles/build_gaea_intel.lua +++ b/modulefiles/build_gaea_intel.lua @@ -5,14 +5,14 @@ the NOAA RDHPC machine Gaea C5 using Intel-2023.1.0 whatis([===[Loads libraries needed for 
building the UFS SRW App on Gaea C5 ]===]) -prepend_path("MODULEPATH","/ncrc/proj/epic/spack-stack/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core") +prepend_path("MODULEPATH","/ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core") stack_intel_ver=os.getenv("stack_intel_ver") or "2023.1.0" load(pathJoin("stack-intel", stack_intel_ver)) stack_mpich_ver=os.getenv("stack_mpich_ver") or "8.1.25" load(pathJoin("stack-cray-mpich", stack_mpich_ver)) -stack_python_ver=os.getenv("stack_python_ver") or "3.10.8" +stack_python_ver=os.getenv("stack_python_ver") or "3.10.13" load(pathJoin("stack-python", stack_python_ver)) cmake_ver=os.getenv("cmake_ver") or "3.23.1" diff --git a/modulefiles/build_hera_gnu.lua b/modulefiles/build_hera_gnu.lua index 7defa36bbf..8854108966 100644 --- a/modulefiles/build_hera_gnu.lua +++ b/modulefiles/build_hera_gnu.lua @@ -1,23 +1,28 @@ help([[ This module loads libraries for building the UFS SRW App on -the NOAA RDHPC machine Hera using GNU 9.2.0 +the NOAA RDHPC machine Hera using GNU 13.3.0 ]]) -whatis([===[Loads libraries needed for building the UFS SRW App on Hera using GNU 9.2.0 ]===]) +whatis([===[Loads libraries needed for building the UFS SRW App on Hera using GNU 13.3.0 ]===]) -prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.1/envs/unified-env-rocky8/install/modulefiles/Core") -prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulefiles") +prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/installs/gnu/modulefiles") +prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/installs/openmpi/modulefiles") +prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/spack-stack/spack-stack-1.6.0_gnu13/envs/ufs-wm-srw-rocky8/install/modulefiles/Core") -load("stack-gcc/9.2.0") -load("stack-openmpi/4.1.5") -load("stack-python/3.10.8") +load("stack-gcc/13.3.0") +load("stack-openmpi/4.1.6") +load("stack-python/3.10.13") load("cmake/3.23.1") load("srw_common") load(pathJoin("nccmp", os.getenv("nccmp_ver") or "1.9.0.1")) -load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6")) -load(pathJoin("openblas", os.getenv("openblas_ver") or "0.3.19")) +load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6")) +load(pathJoin("openblas", os.getenv("openblas_ver") or "0.3.24")) + +prepend_path("CPPFLAGS", " -I/apps/slurm_hera/23.11.3/include/slurm"," ") +prepend_path("LD_LIBRARY_PATH", "/apps/slurm_hera/23.11.3/lib") +setenv("LD_PRELOAD", "/scratch2/NCEPDEV/stmp1/role.epic/installs/gnu/13.3.0/lib64/libstdc++.so.6") setenv("CC", "mpicc") setenv("CXX", "mpic++") diff --git a/modulefiles/build_hera_intel.lua b/modulefiles/build_hera_intel.lua index 72a90d9f47..d8e793044c 100644 --- a/modulefiles/build_hera_intel.lua +++ b/modulefiles/build_hera_intel.lua @@ -8,8 +8,7 @@ whatis([===[Loads libraries needed for building the UFS SRW App on Hera ]===]) prepend_path("MODULEPATH","/contrib/sutils/modulefiles") load("sutils") -prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.1/envs/unified-env-rocky8/install/modulefiles/Core") -prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulefiles") +prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/unified-env-rocky8/install/modulefiles/Core") stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0" load(pathJoin("stack-intel", stack_intel_ver)) @@ -17,7 +16,7 @@ load(pathJoin("stack-intel", stack_intel_ver)) 
stack_impi_ver=os.getenv("stack_impi_ver") or "2021.5.1" load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) -stack_python_ver=os.getenv("stack_python_ver") or "3.10.8" +stack_python_ver=os.getenv("stack_python_ver") or "3.10.13" load(pathJoin("stack-python", stack_python_ver)) cmake_ver=os.getenv("cmake_ver") or "3.23.1" @@ -27,7 +26,7 @@ load("srw_common") load(pathJoin("nccmp", os.getenv("nccmp_ver") or "1.9.0.1")) load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6")) -load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2")) +load(pathJoin("prod_util", os.getenv("prod_util_ver") or "2.1.1")) setenv("CMAKE_C_COMPILER","mpiicc") setenv("CMAKE_CXX_COMPILER","mpiicpc") diff --git a/modulefiles/build_hercules_intel.lua b/modulefiles/build_hercules_intel.lua index 531f48a080..b65890f1c4 100644 --- a/modulefiles/build_hercules_intel.lua +++ b/modulefiles/build_hercules_intel.lua @@ -5,19 +5,18 @@ the MSU machine Hercules using intel-oneapi-compilers/2022.2.1 whatis([===[Loads libraries needed for building the UFS SRW App on Hercules ]===]) -prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core") -prepend_path("MODULEPATH", "/work/noaa/da/role-da/spack-stack/modulefiles") +prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core") load("stack-intel/2021.9.0") load("stack-intel-oneapi-mpi/2021.9.0") -load("stack-python/3.10.8") -load("cmake/3.26.3") +load("stack-python/3.10.13") +load("cmake/3.23.1") load("srw_common") load("nccmp/1.9.0.1") load("nco/5.0.6") -load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2")) +load(pathJoin("prod_util", os.getenv("prod_util_ver") or "2.1.1")) setenv("CFLAGS","-diag-disable=10441") setenv("FFLAGS","-diag-disable=10441") diff --git a/modulefiles/build_jet_intel.lua b/modulefiles/build_jet_intel.lua index 925fef3853..854b4404cb 100644 --- a/modulefiles/build_jet_intel.lua +++ b/modulefiles/build_jet_intel.lua @@ -5,12 +5,11 @@ the NOAA RDHPC machine Jet using Intel-2021.5.0 whatis([===[Loads libraries needed for building the UFS SRW App on Jet ]===]) -prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.5.1/envs/unified-env-rocky8/install/modulefiles/Core") -prepend_path("MODULEPATH", "/lfs4/HFIP/hfv3gfs/spack-stack/modulefiles") +prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.6.0/envs/unified-env-rocky8/install/modulefiles/Core") load("stack-intel/2021.5.0") load("stack-intel-oneapi-mpi/2021.5.1") -load("stack-python/3.10.8") +load("stack-python/3.10.13") load("cmake/3.23.1") load("srw_common") diff --git a/modulefiles/build_noaacloud_intel.lua b/modulefiles/build_noaacloud_intel.lua index 0b6a9c1ca4..dd774e8ed9 100644 --- a/modulefiles/build_noaacloud_intel.lua +++ b/modulefiles/build_noaacloud_intel.lua @@ -5,7 +5,7 @@ the NOAA cloud using Intel-oneapi whatis([===[Loads libraries needed for building the UFS SRW App on NOAA cloud ]===]) -prepend_path("MODULEPATH", "/contrib/spack-stack/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core") +prepend_path("MODULEPATH", "/contrib/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core") prepend_path("MODULEPATH", "/apps/modules/modulefiles") prepend_path("PATH", "/contrib/EPIC/bin") load("stack-intel") diff --git a/modulefiles/build_orion_intel.lua b/modulefiles/build_orion_intel.lua index 8e895c5bee..b2f3d85c00 100644 --- 
a/modulefiles/build_orion_intel.lua +++ b/modulefiles/build_orion_intel.lua @@ -1,24 +1,25 @@ help([[ This module loads libraries for building the UFS SRW App on -the MSU machine Orion using Intel-2022.1.2 +the MSU machine Orion using intel-oneapi-compilers/2021.9.0 ]]) whatis([===[Loads libraries needed for building the UFS SRW App on Orion ]===]) -prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.5.1/envs/unified-env/install/modulefiles/Core") -prepend_path("MODULEPATH", "/work/noaa/da/role-da/spack-stack/modulefiles") +prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/unified-env-rocky9/install/modulefiles/Core") -load("stack-intel/2022.0.2") -load("stack-intel-oneapi-mpi/2021.5.1") -load("stack-python/3.10.8") -load("cmake/3.22.1") +load("stack-intel/2021.9.0") +load("stack-intel-oneapi-mpi/2021.9.0") +load("stack-python/3.10.13") +load("cmake/3.23.1") load("srw_common") load("nccmp/1.9.0.1") load("nco/5.0.6") -load("wget") -load(pathJoin("prod_util", os.getenv("prod_util_ver") or "1.2.2")) +load(pathJoin("prod_util", os.getenv("prod_util_ver") or "2.1.1")) + +setenv("CFLAGS","-diag-disable=10441") +setenv("FFLAGS","-diag-disable=10441") setenv("CMAKE_C_COMPILER","mpiicc") setenv("CMAKE_CXX_COMPILER","mpiicpc") diff --git a/modulefiles/srw_common.lua b/modulefiles/srw_common.lua index 79c67283f9..cb2047cbe1 100644 --- a/modulefiles/srw_common.lua +++ b/modulefiles/srw_common.lua @@ -3,21 +3,21 @@ load("zlib/1.2.13") load("libpng/1.6.37") load("netcdf-c/4.9.2") -load("netcdf-fortran/4.6.0") +load("netcdf-fortran/4.6.1") load("parallelio/2.5.10") -load("esmf/8.5.0") -load("fms/2023.02.01") +load("esmf/8.6.0") +load("fms/2023.04") load("bacio/2.4.1") -load("crtm/2.4.0") +load("crtm/2.4.0.1") load("g2/3.4.5") load("g2tmpl/1.10.2") load("ip/4.3.0") -load("sp/2.3.3") +load("sp/2.5.0") load("w3emc/2.10.0") load("gftl-shared/1.6.1") -load("mapl/2.40.3-esmf-8.5.0") +load("mapl/2.40.3-esmf-8.6.0") load("nemsio/2.5.4") load("sfcio/1.4.1") diff --git a/modulefiles/tasks/noaacloud/plot_allvars.local.lua b/modulefiles/tasks/noaacloud/plot_allvars.local.lua index cc122f69b2..b7e9528710 100644 --- a/modulefiles/tasks/noaacloud/plot_allvars.local.lua +++ b/modulefiles/tasks/noaacloud/plot_allvars.local.lua @@ -1,5 +1,2 @@ -unload("python") -append_path("MODULEPATH","/contrib/EPIC/miniconda3/modulefiles") -load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) - -setenv("SRW_ENV", "regional_workflow") +load("conda") +setenv("SRW_ENV", "srw_graphics") diff --git a/modulefiles/tasks/noaacloud/python_srw.lua b/modulefiles/tasks/noaacloud/python_srw.lua index a2dd45084c..e6e4268c35 100644 --- a/modulefiles/tasks/noaacloud/python_srw.lua +++ b/modulefiles/tasks/noaacloud/python_srw.lua @@ -1,2 +1,7 @@ load("conda") setenv("SRW_ENV", "srw_app") + +-- Add missing libstdc binary for Azure +if os.getenv("PW_CSP") == "azure" then + setenv("LD_PRELOAD","/opt/nvidia/nsight-systems/2023.1.2/host-linux-x64/libstdc++.so.6") +end diff --git a/modulefiles/tasks/noaacloud/run_vx.local.lua b/modulefiles/tasks/noaacloud/run_vx.local.lua index 737fc4f7cc..67b1b98ad6 100644 --- a/modulefiles/tasks/noaacloud/run_vx.local.lua +++ b/modulefiles/tasks/noaacloud/run_vx.local.lua @@ -25,3 +25,8 @@ end load("ufs-pyenv") load("conda") setenv("SRW_ENV", "srw_app") + +-- Add missing libstdc binary for Azure +if os.getenv("PW_CSP") == "azure" then + setenv("LD_PRELOAD","/opt/nvidia/nsight-systems/2023.1.2/host-linux-x64/libstdc++.so.6") +end 
diff --git a/modulefiles/tasks/orion/run_vx.local.lua b/modulefiles/tasks/orion/run_vx.local.lua index 5bafb4d46b..737fc4f7cc 100644 --- a/modulefiles/tasks/orion/run_vx.local.lua +++ b/modulefiles/tasks/orion/run_vx.local.lua @@ -1,8 +1,6 @@ --[[ Compiler-specific modules are used for met and metplus libraries --]] ---load("build_orion_intel") - local met_ver = (os.getenv("met_ver") or "11.1.0") local metplus_ver = (os.getenv("metplus_ver") or "5.1.0") if (mode() == "load") then @@ -20,12 +18,10 @@ setenv("METPLUS_VERSION", metplus_ver) setenv("METPLUS_ROOT", base_metplus) setenv("METPLUS_PATH", base_metplus) - if (mode() == "unload") then unload(pathJoin("met", met_ver)) unload(pathJoin("metplus",metplus_ver)) end ---load("ufs-pyenv") -load("stack-python/3.10.8") +load("ufs-pyenv") load("conda") setenv("SRW_ENV", "srw_app") diff --git a/modulefiles/wflow_noaacloud.lua b/modulefiles/wflow_noaacloud.lua index ebf907545b..5e0c0ca50a 100644 --- a/modulefiles/wflow_noaacloud.lua +++ b/modulefiles/wflow_noaacloud.lua @@ -8,15 +8,15 @@ whatis([===[Loads libraries needed for running the UFS SRW App on NOAA cloud ]== prepend_path("MODULEPATH","/apps/modules/modulefiles") load("rocoto") - - load("conda") -setenv("PROJ_LIB","/contrib/EPIC/miniconda3/4.12.0/envs/regional_workflow/share/proj") -setenv("OPT","/contrib/EPIC/hpc-modules") -append_path("PATH","/contrib/EPIC/miniconda3/4.12.0/envs/regional_workflow/bin") prepend_path("PATH","/contrib/EPIC/bin") +-- Add missing libstdc binary for Azure +if os.getenv("PW_CSP") == "azure" then + setenv("LD_PRELOAD","/opt/nvidia/nsight-systems/2023.1.2/host-linux-x64/libstdc++.so.6") +end + if mode() == "load" then LmodMsgRaw([===[Please do the following to activate conda: > conda activate srw_app diff --git a/modulefiles/wflow_orion.lua b/modulefiles/wflow_orion.lua index 711991bb09..8bbc5663da 100644 --- a/modulefiles/wflow_orion.lua +++ b/modulefiles/wflow_orion.lua @@ -6,9 +6,8 @@ the MSU machine Orion whatis([===[Loads libraries needed for running SRW on Orion ]===]) load("contrib") -load("rocoto") -load("wget") - +load("ruby/3.2.3") +load("rocoto/1.3.7") unload("python") load("conda") From fe8fc68b0c22ccbd2181b28b88a0e77a9f6b3ba5 Mon Sep 17 00:00:00 2001 From: jdkublnick <47824899+jdkublnick@users.noreply.github.com> Date: Fri, 21 Jun 2024 11:07:42 -0400 Subject: [PATCH 33/42] [develop]: Updated ConfigWorkflow.rst to reflect changes to config_defaults.yaml (PI12) (#1095) Updated ConfigWorkflow.rst to reflect recent changes to config_defaults.yaml in order to keep documentation up to date. --------- Co-authored-by: Gillian Petro <96886803+gspetro-NOAA@users.noreply.github.com> Co-authored-by: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com> --- .../CustomizingTheWorkflow/ConfigWorkflow.rst | 94 ++++++------------- 1 file changed, 27 insertions(+), 67 deletions(-) diff --git a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst index 960275d2bb..52cce90c2c 100644 --- a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst @@ -538,7 +538,7 @@ CCPP Parameter ``CCPP_PHYS_SUITE_FP``: (Default: ``'{{ [workflow.EXPTDIR, CCPP_PHYS_SUITE_FN]|path_join }}'``) The full path to the suite definition file (SDF) in the experiment directory. 
-``CCPP_PHYS_DIR``: (Default: ``'{{ [user.UFS_WTHR_MDL_DIR, "FV3", "ccpp", "physics", "physics"] |path_join }}'``)
+``CCPP_PHYS_DIR``: (Default: ``'{{ [user.UFS_WTHR_MDL_DIR, "FV3", "ccpp", "physics", "physics", "SFC_Models", "Land", "Noahmp"] |path_join }}'``)
 The directory containing the CCPP physics source code. This is needed to link table(s) contained in that repository.
 Field Dictionary Parameters
@@ -707,7 +707,7 @@ A standard set of environment variables has been established for *nco* mode to s
 ``envir_default, NET_default, model_ver_default, RUN_default``:
 Standard environment variables defined in the NCEP Central Operations WCOSS Implementation Standards document. These variables are used in forming the path to various directories containing input, output, and workflow files. The variables are defined in the :nco:`WCOSS Implementation Standards ` document (pp. 4-5) as follows:
- ``envir_default``: (Default: "para")
+ ``envir_default``: (Default: "test")
 Set to "test" during the initial testing phase, "para" when running in parallel (on a schedule), and "prod" in production.
 ``NET_default``: (Default: "srw")
@@ -719,46 +719,28 @@ A standard set of environment variables has been established for *nco* mode to s
 ``RUN_default``: (Default: "srw")
 Name of model run (third level of ``com`` directory structure). In general, same as ``${NET_default}``.
-``OPSROOT_default``: (Default: ``'{{ workflow.EXPT_BASEDIR }}/../nco_dirs'``)
- The operations root directory in *nco* mode.
-
-``COMROOT_default``: (Default: ``'{{ OPSROOT_default }}/com'``)
- The ``com`` root directory for input/output data that is located on the current system (typically ``$OPSROOT_default/com``).
-
-``DATAROOT_default``: (Default: ``'{{OPSROOT_default }}/tmp'``)
- Directory containing the (temporary) working directory for running jobs; typically named ``$OPSROOT_default/tmp`` in production.
-
-``DCOMROOT_default``: (Default: ``'{{OPSROOT_default }}/dcom'``)
- ``dcom`` root directory, typically ``$OPSROOT_default/dcom``. This directory contains input/incoming data that is retrieved from outside WCOSS.
-
-``LOGBASEDIR_default``: (Default: ``'{% if user.RUN_ENVIR == "nco" %}{{ [OPSROOT_default, "output"]|path_join }}{% else %}{{ [workflow.EXPTDIR, "log"]|path_join }}{% endif %}'``)
- Directory in which the log files from the workflow tasks will be placed.
-
-``COMIN_BASEDIR``: (Default: ``'{{ COMROOT_default }}/{{ NET_default }}/{{ model_ver_default }}'``)
- ``com`` directory for current model's input data, typically ``$COMROOT/$NET/$model_ver/$RUN.$PDY``.
-
-``COMOUT_BASEDIR``: (Default: ``'{{ COMROOT_default }}/{{ NET_default }}/{{ model_ver_default }}'``)
- ``com`` directory for current model's output data, typically ``$COMROOT/$NET/$model_ver/$RUN.$PDY``.
+``PTMP``: (Default: ``'{{ workflow.EXPT_BASEDIR }}/../nco_dirs'``)
+ User-defined path to the com type directories (``OPSROOT=$PTMP/$envir``).
 ``DBNROOT_default``: (Default: "")
 Root directory for the data-alerting utilities.
-``SENDECF_default``: (Default: false)
+``SENDECF_default``: (Default: "NO")
 Boolean variable used to control ``ecflow_client`` child commands.
-``SENDDBN_default``: (Default: false)
+``SENDDBN_default``: (Default: "NO")
 Boolean variable used to control sending products off WCOSS2.
-``SENDDBN_NTC_default``: (Default: false)
+``SENDDBN_NTC_default``: (Default: "NO")
 Boolean variable used to control sending products with WMO headers off WCOSS2.
-``SENDCOM_default``: (Default: false) +``SENDCOM_default``: (Default: "YES") Boolean variable to control data copies to ``$COMOUT``. -``SENDWEB_default``: (Default: false) +``SENDWEB_default``: (Default: "NO") Boolean variable used to control sending products to a web server, often ``ncorzdm``. -``KEEPDATA_default``: (Default: true) +``KEEPDATA_default``: (Default: "YES") Boolean variable used to specify whether or not the working directory should be kept upon successful job completion. ``MAILTO_default``: (Default: "") @@ -1382,6 +1364,9 @@ Non-default parameters for the ``nexus_emission_*`` tasks are set in the ``task_ ``PPN_NEXUS_EMISSION``: (Default: ``'{{ platform.NCORES_PER_NODE // OMP_NUM_THREADS_NEXUS_EMISSION }}'``) Processes per node for the ``nexus_emission_*`` tasks. +``NNODES_NEXUS_EMISSION``: (Default: 4) + The number of nodes to request from the job scheduler for the NEXUS emission task. + ``KMP_AFFINITY_NEXUS_EMISSION``: (Default: "scatter") Intel Thread Affinity Interface for the ``nexus_emission_*`` tasks. See :ref:`this note ` for more information on thread affinity. @@ -1391,12 +1376,20 @@ Non-default parameters for the ``nexus_emission_*`` tasks are set in the ``task_ ``OMP_STACKSIZE_NEXUS_EMISSION``: (Default: "1024m") Controls the size of the stack for threads created by the OpenMP implementation. +POINT_SOURCE Configuration Parameters +------------------------------------------------ +Non-default parameters for the ``task_point_source`` tasks are set in the ``task_point_source:`` section of the ``config.yaml`` file. + +``PT_SRC_SUBDIR``: (Default: ``"NEI2016v1/v2023-01-PT"``) + Subdirectory structure of point source data under ``FIXemis``. + Full path: ``FIXemis/PT_SRC_SUBDIR`` + BIAS_CORRECTION_O3 Configuration Parameters ------------------------------------------------- Non-default parameters for the ``bias_correction_o3`` tasks are set in the ``task_bias_correction_o3:`` section of the ``config.yaml`` file. -``KMP_AFFINITY_BIAS_CORRECTION_O3``: "scatter" +``KMP_AFFINITY_BIAS_CORRECTION_O3``: (Default: "scatter") Intel Thread Affinity Interface for the ``bias_correction_o3`` task. See :ref:`this note ` for more information on thread affinity. ``OMP_NUM_THREADS_BIAS_CORRECTION_O3``: (Default: 32) @@ -1750,38 +1743,14 @@ Non-default parameters for coupled Air Quality Modeling (AQM) tasks are set in t ``DO_AQM_SAVE_FIRE``: (Default: false) Archive fire emission file to HPSS. -``DCOMINbio_default``: (Default: "") - Path to the directory containing AQM bio files. - -``DCOMINdust_default``: (Default: "/path/to/dust/dir") - Path to the directory containing AQM dust file. - -``DCOMINcanopy_default``: (Default: "/path/to/canopy/dir") - Path to the directory containing AQM canopy files. - -``DCOMINfire_default``: (Default: "") - Path to the directory containing AQM fire files. - -``DCOMINchem_lbcs_default``: (Default: "") - Path to the directory containing chemical LBC files. - -``DCOMINgefs_default``: (Default: "") - Path to the directory containing GEFS aerosol LBC files. - -``DCOMINpt_src_default``: (Default: "/path/to/point/source/base/directory") - Parent directory containing point source files. - -``DCOMINairnow_default``: (Default: "/path/to/airnow/obaservation/data") +``COMINairnow_default``: (Default: "/path/to/airnow/observation/data") Path to the directory containing AIRNOW observation data. -``COMINbicor``: (Default: "/path/to/historical/airnow/data/dir") - Path of reading in historical training data for bias correction. 
-
-``COMOUTbicor``: (Default: "/path/to/historical/airnow/data/dir")
- Path to save the current cycle's model output and AirNow observations as training data for future use. ``$COMINbicor`` and ``$COMOUTbicor`` can be distinguished by the ``${yyyy}${mm}${dd}`` under the same location.
+``COMINfire_default``: (Default: "")
+ Path to the directory containing AQM fire files.
-``AQM_CONFIG_DIR``: (Default: "")
- Configuration directory for AQM.
+``COMINgefs_default``: (Default: "")
+ Path to the directory containing GEFS aerosol LBC files.
 ``AQM_BIO_FILE``: (Default: "BEIS_SARC401.ncf")
 File name of AQM BIO file.
@@ -1807,9 +1776,6 @@ Non-default parameters for coupled Air Quality Modeling (AQM) tasks are set in t
 ``AQM_FIRE_FILE_OFFSET_HRS``: (Default: 0)
 Time offset when retrieving fire emission data files. In a real-time run, the data files for :term:`ICs/LBCs` are not ready for use until the case starts. To resolve this issue, a real-time run uses the input data files in the previous cycle. For example, if the experiment run cycle starts at 12z, and ``AQM_FIRE_FILE_OFFSET_HRS: 6``, the fire emission data file from the previous cycle (06z) is used.
-``AQM_FIRE_ARCHV_DIR``: (Default: "/path/to/archive/dir/for/RAVE/on/HPSS")
- Path to the archive directory for RAVE emission files on :term:`HPSS`.
 ``AQM_RC_FIRE_FREQUENCY``: (Default: "static")
 Fire frequency in ``aqm.rc``.
@@ -1828,12 +1794,6 @@ Non-default parameters for coupled Air Quality Modeling (AQM) tasks are set in t
 ``AQM_GEFS_FILE_CYC``: (Default: "")
 Cycle of the GEFS aerosol LBC files only if it is fixed.
-``NEXUS_INPUT_DIR``: (Default: "")
- Same as ``GRID_DIR`` but for the air quality emission generation task. Should be blank for the default value specified in ``setup.sh``.
-
-``NEXUS_FIX_DIR``: (Default: "")
- Directory containing ``grid_spec`` files as the input file of NEXUS.
 ``NEXUS_GRID_FN``: (Default: "grid_spec_GSD_HRRR_25km.nc")
 File name of the input ``grid_spec`` file of NEXUS.

From e5832d184575985f5bbc613a427696eb76cf31d1 Mon Sep 17 00:00:00 2001
From: gsketefian <31046882+gsketefian@users.noreply.github.com>
Date: Fri, 12 Jul 2024 06:57:23 -0600
Subject: [PATCH 34/42] [develop] Bug fix to support the %H format in METplus via printf. (#1102)

This bug was encountered when verifying forecast output that has a 2-digit forecast hour in its name. It turns out specifying the METplus format %H to obtain a 2-digit forecast hour in the workflow/verification configuration variable FCST_FN_TEMPLATE (and others) causes an error in the shell script eval_METplus_timestr_tmpl.sh because bash's printf utility does not support the %H format. This fixes that error using a similar approach to the %HHH format for obtaining 3-digit hours.
---
 ush/bash_utils/eval_METplus_timestr_tmpl.sh | 16 +++++++++++++++-
 1 file changed, 15 insertions(+), 1 deletion(-)

diff --git a/ush/bash_utils/eval_METplus_timestr_tmpl.sh b/ush/bash_utils/eval_METplus_timestr_tmpl.sh
index 245369509b..572f7c68c4 100644
--- a/ush/bash_utils/eval_METplus_timestr_tmpl.sh
+++ b/ush/bash_utils/eval_METplus_timestr_tmpl.sh
@@ -163,9 +163,23 @@ cannot be empty:
 #-----------------------------------------------------------------------
 #
 case "${METplus_time_fmt}" in
- "%Y%m%d%H"|"%Y%m%d"|"%H%M%S"|"%H")
+ "%Y%m%d%H"|"%Y%m%d"|"%H%M%S")
 fmt="${METplus_time_fmt}"
 ;;
+ "%H")
+#
+# The "%H" format needs to be treated differently depending on whether it's
+# formatting a "lead" time type or another (e.g. "init" or "valid") because
+# for "lead", the printf function is used below (which doesn't understand
+# the "%H" format) whereas for the others, the date utility is used (which
+# does understand "%H").
+#
+ if [ "${METplus_time_type}" = "lead" ]; then
+ fmt="%02.0f"
+ else
+ fmt="${METplus_time_fmt}"
+ fi
+ ;;
 "%HHH")
#
# Print format assumes that the argument to printf (i.e. the number to

From 29429fedec7155c4815bcf4f7083e3dbadafa7d3 Mon Sep 17 00:00:00 2001
From: Gillian Petro <96886803+gspetro-NOAA@users.noreply.github.com>
Date: Mon, 15 Jul 2024 12:53:25 -0400
Subject: [PATCH 35/42] [develop]: Update requests and certifi in requirements.txt (#1103)

* The Dependabot PR #1101 identified the need to update the certifi version, but requests should also be updated from the current (yanked) version in the requirements file.
* The README.md and doc/README files have also been updated.
---
 README.md | 6 +++---
 doc/README | 5 +++--
 doc/requirements.txt | 6 +++---
 3 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/README.md b/README.md
index 3bf56f4c21..bdda52279d 100644
--- a/README.md
+++ b/README.md
@@ -1,13 +1,13 @@
 # UFS Short-Range Weather Application
-The Unified Forecast System (UFS) is a community-based, coupled, comprehensive Earth modeling system. NOAA's operational model suite for numerical weather prediction (NWP) is quickly transitioning to the UFS from a number of legacy modeling systems. The UFS enables research, development, and contribution opportunities within the broader Weather Enterprise (including government, industry, and academia). For more information about the UFS, visit the UFS Portal at https://ufscommunity.org/.
+The Unified Forecast System (UFS) is a community-based, coupled, comprehensive Earth modeling system. NOAA's operational model suite for numerical weather prediction (NWP) is quickly transitioning to the UFS from a number of legacy modeling systems. The UFS enables research, development, and contribution opportunities within the broader Weather Enterprise (including government, industry, and academia). For more information about the UFS, visit the UFS Portal at https://ufs.epic.noaa.gov/.
-The UFS includes multiple applications (see a complete list at https://ufscommunity.org/science/aboutapps/) that support different forecast durations and spatial domains. This documentation describes the development branch of the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. The development branch of the application is continually evolving as the system undergoes open development. The latest SRW App release (v2.2.0) represents a snapshot of this continuously evolving system.
+The UFS includes multiple applications (see a complete list at https://ufs.epic.noaa.gov/applications/) that support different forecast durations and spatial domains. This documentation describes the development branch of the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. The development branch of the application is continually evolving as the system undergoes open development. The latest SRW App release (v2.2.0) represents a snapshot of this continuously evolving system.
The UFS SRW App User's Guide associated with the development branch is at: https://ufs-srweather-app.readthedocs.io/en/develop/, while the guide specific to the SRW App v2.2.0 release can be found at: https://ufs-srweather-app.readthedocs.io/en/release-public-v2.2.0/. The repository is at: https://github.com/ufs-community/ufs-srweather-app. For instructions on how to clone the repository, build the code, and run the workflow, see: -- https://ufs-srweather-app.readthedocs.io/en/develop/BuildingRunningTesting/Quickstart.html +- https://ufs-srweather-app.readthedocs.io/en/develop/UsersGuide/BuildingRunningTesting/Quickstart.html For a debugging guide for users and developers in the field of Earth System Modeling, please see: https://epic.noaa.gov/wp-content/uploads/2022/12/Debugging-Guide.pdf diff --git a/doc/README b/doc/README index 0ad8948eda..017f865384 100644 --- a/doc/README +++ b/doc/README @@ -20,10 +20,11 @@ Steps to build and use the Sphinx documentation tool: To build html: -$ cd ufs-srweather-app/docs/UsersGuide -$ make clean && sphinx-build -b html source build +$ cd ufs-srweather-app/doc +$ make clean && sphinx-build -b html . build The "make html" command can often be used in place of the previous command. +"make doc" will both build the html and run the linkchecker. Sphinx uses Latex to export the documentation as a PDF file. To build pdf: diff --git a/doc/requirements.txt b/doc/requirements.txt index 90efd3211e..e6d38a4eb8 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -2,13 +2,13 @@ # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile requirements.in +# pip-compile --strip-extras requirements.in # alabaster==0.7.16 # via sphinx babel==2.14.0 # via sphinx -certifi==2024.2.2 +certifi==2024.7.4 # via requests charset-normalizer==3.3.2 # via requests @@ -40,7 +40,7 @@ pygments==2.17.2 # via sphinx pyyaml==6.0.1 # via pybtex -requests==2.32.0 +requests==2.32.2 # via sphinx six==1.16.0 # via From c377164582ee071ce8b3921e10b2d0f100141887 Mon Sep 17 00:00:00 2001 From: Christina Holt <56881914+christinaholtNOAA@users.noreply.github.com> Date: Fri, 26 Jul 2024 11:26:41 -0600 Subject: [PATCH 36/42] [develop] Transition the var_defns bash file to YAML. (#1098) Use YAML for the configuration language at run time. 
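
For reference, a minimal sketch of the run-time change as it appears in the
J-Job scripts (the pattern is taken from the diffs that follow; the list of
sections sourced varies by task, and "task_make_grid" is just one example):

    # Before: source a single generated bash file of variable definitions
    . $USHdir/source_util_funcs.sh
    source_config_for_task "task_make_grid" ${GLOBAL_VAR_DEFNS_FP}

    # After: source only the needed sections of the YAML definitions file
    . $USHdir/source_util_funcs.sh
    for sect in user nco workflow task_make_grid ; do
      source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
    done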
--------- Co-authored-by: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com> Co-authored-by: Michael Kavulich Co-authored-by: michael.lueken --- .cicd/scripts/wrapper_srw_ftest.sh | 3 +- aqm_environment.yml | 2 +- .../CustomizingTheWorkflow/ConfigWorkflow.rst | 6 +- environment.yml | 2 +- jobs/JREGIONAL_CHECK_POST_OUTPUT | 19 +- jobs/JREGIONAL_GET_EXTRN_MDL_FILES | 64 +++-- jobs/JREGIONAL_GET_VERIF_OBS | 18 +- jobs/JREGIONAL_INTEGRATION_TEST | 31 ++- jobs/JREGIONAL_MAKE_GRID | 114 ++------ jobs/JREGIONAL_MAKE_ICS | 30 +- jobs/JREGIONAL_MAKE_LBCS | 28 +- jobs/JREGIONAL_MAKE_OROG | 26 +- jobs/JREGIONAL_MAKE_SFC_CLIMO | 29 +- jobs/JREGIONAL_PLOT_ALLVARS | 49 +++- jobs/JREGIONAL_RUN_FCST | 25 +- ...EGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT | 15 +- ...JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX | 18 +- ...L_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN | 16 +- ...L_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB | 16 +- jobs/JREGIONAL_RUN_MET_PB2NC_OBS | 16 +- jobs/JREGIONAL_RUN_MET_PCPCOMBINE | 17 +- jobs/JREGIONAL_RUN_POST | 42 ++- jobs/JREGIONAL_RUN_PRDGEN | 33 ++- jobs/JSRW_AQM_ICS | 6 +- jobs/JSRW_AQM_LBCS | 7 +- jobs/JSRW_BIAS_CORRECTION_O3 | 7 +- jobs/JSRW_BIAS_CORRECTION_PM25 | 7 +- jobs/JSRW_FIRE_EMISSION | 6 +- jobs/JSRW_NEXUS_EMISSION | 6 +- jobs/JSRW_NEXUS_GFS_SFC | 8 +- jobs/JSRW_NEXUS_POST_SPLIT | 6 +- jobs/JSRW_POINT_SOURCE | 7 +- jobs/JSRW_POST_STAT_O3 | 7 +- jobs/JSRW_POST_STAT_PM25 | 7 +- jobs/JSRW_PRE_POST_STAT | 6 +- .../tasks/cheyenne/plot_allvars.local.lua | 2 +- .../tasks/derecho/plot_allvars.local.lua | 2 +- modulefiles/tasks/gaea/plot_allvars.local.lua | 2 +- modulefiles/tasks/hera/plot_allvars.local.lua | 2 +- .../tasks/hercules/plot_allvars.local.lua | 2 +- modulefiles/tasks/jet/plot_allvars.local.lua | 2 +- .../tasks/noaacloud/plot_allvars.local.lua | 7 +- .../tasks/orion/plot_allvars.local.lua | 2 +- parm/wflow/aqm_post.yaml | 10 +- parm/wflow/aqm_prep.yaml | 16 +- parm/wflow/coldstart.yaml | 10 +- parm/wflow/default_workflow.yaml | 2 +- parm/wflow/plot.yaml | 2 +- parm/wflow/post.yaml | 2 +- parm/wflow/prdgen.yaml | 2 +- parm/wflow/prep.yaml | 6 +- parm/wflow/test.yaml | 2 +- parm/wflow/verify_det.yaml | 8 +- parm/wflow/verify_ens.yaml | 16 +- parm/wflow/verify_pre.yaml | 18 +- scripts/exregional_check_post_output.sh | 44 ++- scripts/exregional_get_extrn_mdl_files.sh | 72 ++++- scripts/exregional_get_verif_obs.sh | 27 +- scripts/exregional_integration_test.py | 6 +- scripts/exregional_make_grid.sh | 111 +++++++- scripts/exregional_make_ics.sh | 92 +++++- scripts/exregional_make_lbcs.sh | 88 +++++- scripts/exregional_make_orog.sh | 262 ++++++++++-------- scripts/exregional_make_sfc_climo.sh | 51 +++- scripts/exregional_run_fcst.sh | 169 +++++++++-- ...onal_run_met_genensprod_or_ensemblestat.sh | 6 +- ...gional_run_met_gridstat_or_pointstat_vx.sh | 10 +- ...un_met_gridstat_or_pointstat_vx_ensmean.sh | 6 +- ...un_met_gridstat_or_pointstat_vx_ensprob.sh | 6 +- scripts/exregional_run_met_pb2nc_obs.sh | 6 +- scripts/exregional_run_met_pcpcombine.sh | 10 +- scripts/exregional_run_post.sh | 76 ++++- scripts/exregional_run_prdgen.sh | 8 +- scripts/exsrw_aqm_ics.sh | 5 +- scripts/exsrw_aqm_lbcs.sh | 12 +- scripts/exsrw_bias_correction_o3.sh | 8 +- scripts/exsrw_bias_correction_pm25.sh | 8 +- scripts/exsrw_fire_emission.sh | 5 +- scripts/exsrw_nexus_emission.sh | 6 +- scripts/exsrw_nexus_gfs_sfc.sh | 7 +- scripts/exsrw_nexus_post_split.sh | 5 +- scripts/exsrw_point_source.sh | 6 +- scripts/exsrw_post_stat_o3.sh | 6 +- scripts/exsrw_post_stat_pm25.sh | 6 +- 
scripts/exsrw_pre_post_stat.sh | 6 +- tests/WE2E/utils.py | 6 +- tests/test_python/test_retrieve_data.py | 58 ---- ush/bash_utils/check_var_valid_value.sh | 2 +- ush/bash_utils/create_symlink_to_file.sh | 1 + ush/bash_utils/print_msg.sh | 2 +- ush/bash_utils/source_config.sh | 53 ---- ush/bash_utils/source_yaml.sh | 36 +++ ush/config_defaults.yaml | 16 +- ush/create_aqm_rc_file.py | 4 +- ush/create_diag_table_file.py | 4 +- ush/create_model_configure_file.py | 4 +- ush/create_ufs_configure_file.py | 4 +- ush/generate_FV3LAM_wflow.py | 26 +- ush/job_preamble.sh | 7 +- ush/launch_FV3LAM_wflow.sh | 50 +--- ush/link_fix.py | 4 +- ush/load_modules_run_task.sh | 129 +++++---- ush/machine/hera.yaml | 4 +- ush/set_fv3nml_ens_stoch_seeds.py | 8 +- ush/set_fv3nml_sfc_climo_filenames.py | 8 +- ush/setup.py | 11 +- ush/source_util_funcs.sh | 6 +- ush/update_input_nml.py | 4 +- ush/wrappers/run_fcst.sh | 7 +- ush/wrappers/run_get_ics.sh | 7 +- ush/wrappers/run_get_lbcs.sh | 7 +- ush/wrappers/run_make_grid.sh | 7 +- ush/wrappers/run_make_ics.sh | 7 +- ush/wrappers/run_make_lbcs.sh | 7 +- ush/wrappers/run_make_orog.sh | 7 +- ush/wrappers/run_make_sfc_climo.sh | 7 +- ush/wrappers/run_post.sh | 7 +- 117 files changed, 1748 insertions(+), 706 deletions(-) delete mode 100644 ush/bash_utils/source_config.sh create mode 100644 ush/bash_utils/source_yaml.sh mode change 100755 => 100644 ush/launch_FV3LAM_wflow.sh diff --git a/.cicd/scripts/wrapper_srw_ftest.sh b/.cicd/scripts/wrapper_srw_ftest.sh index 950ceb7a34..ee26edadaf 100755 --- a/.cicd/scripts/wrapper_srw_ftest.sh +++ b/.cicd/scripts/wrapper_srw_ftest.sh @@ -24,7 +24,8 @@ fi if [[ "${SRW_PLATFORM}" == gaea ]]; then sed -i '15i #SBATCH --clusters=c5' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh sed -i 's|qos=batch|qos=normal|g' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh - sed -i 's|${JOBSdir}/JREGIONAL_RUN_POST|$USHdir/load_modules_run_task.sh "run_post" ${JOBSdir}/JREGIONAL_RUN_POST|g' ${WORKSPACE}/${SRW_PLATFORM}/ush/wrappers/run_post.sh + sed -i 's|00:30:00|00:45:00|g' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh + sed -i 's|${JOBSdir}/JREGIONAL_RUN_POST|$USHdir/load_modules_run_task.sh "gaea" "run_post" ${JOBSdir}/JREGIONAL_RUN_POST|g' ${WORKSPACE}/${SRW_PLATFORM}/ush/wrappers/run_post.sh fi if [[ "${SRW_PLATFORM}" == hera ]]; then diff --git a/aqm_environment.yml b/aqm_environment.yml index afd8a7b634..11bf9e57e3 100644 --- a/aqm_environment.yml +++ b/aqm_environment.yml @@ -9,5 +9,5 @@ dependencies: - pylint=2.17* - pytest=7.2* - scipy=1.10.* - - uwtools=2.1* + - uwtools=2.3* - xarray=2022.11.* diff --git a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst index 52cce90c2c..5161268980 100644 --- a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst @@ -293,7 +293,7 @@ WORKFLOW Configuration Parameters If non-default parameters are selected for the variables in this section, they should be added to the ``workflow:`` section of the ``config.yaml`` file. -``WORKFLOW_ID``: (Default: ``!nowtimestamp ''``) +``WORKFLOW_ID``: (Default: ``''``) Unique ID for the workflow run that will be set in ``setup.py``. 
``RELATIVE_LINK_FLAG``: (Default: "--relative") @@ -458,8 +458,8 @@ This section contains files and paths to files that are staged in the experiment ``WFLOW_XML_FN``: (Default: "FV3LAM_wflow.xml") Name of the Rocoto workflow XML file that the experiment generation script creates. This file defines the workflow for the experiment. -``GLOBAL_VAR_DEFNS_FN``: (Default: "var_defns.sh") - Name of the file (a shell script) containing definitions of the primary and secondary experiment variables (parameters). This file is sourced by many scripts (e.g., the J-job scripts corresponding to each workflow task) in order to make all the experiment variables available in those scripts. The primary variables are defined in the default configuration file (``config_defaults.yaml``) and in the user configuration file (``config.yaml``). The secondary experiment variables are generated by the experiment generation script. +``GLOBAL_VAR_DEFNS_FN``: (Default: "var_defns.yaml") + Name of the auto-generated experiment configuration file. It contains the primary experiment variables defined in this default configuration script and in the user-specified configuration as well as secondary experiment variables generated by the experiment generation script from machine files and other settings. This file is the primary source of information used in the scripts at run time. ``ROCOTO_YAML_FN``: (Default: "rocoto_defns.yaml") Name of the YAML file containing the YAML workflow definition from which the Rocoto XML file is created. diff --git a/environment.yml b/environment.yml index e2dd6b8300..a735213198 100644 --- a/environment.yml +++ b/environment.yml @@ -5,4 +5,4 @@ channels: dependencies: - pylint=2.17* - pytest=7.2* - - uwtools=2.2* + - uwtools=2.3* diff --git a/jobs/JREGIONAL_CHECK_POST_OUTPUT b/jobs/JREGIONAL_CHECK_POST_OUTPUT index f55f730cf4..358b1fad72 100755 --- a/jobs/JREGIONAL_CHECK_POST_OUTPUT +++ b/jobs/JREGIONAL_CHECK_POST_OUTPUT @@ -3,7 +3,22 @@ # #----------------------------------------------------------------------- # +# The J-Job script for checking the post output. # +# Run-time environment variables: +# +# CDATE +# ENSMEM_INDX +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# workflow: +# EXPTDIR # #----------------------------------------------------------------------- # @@ -16,7 +31,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES index 80366f0ddc..fbd582201a 100755 --- a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES +++ b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES @@ -3,20 +3,48 @@ # #----------------------------------------------------------------------- # -# This script gets either from the system directory or from mass store -# (HPSS) the files generated by the external model (specified by the -# variable EXTRN_MDL_NAME) for either the initial conditions (ICs) or the -# lateral boundary conditions (LBCs). Which of these we are considering -# depends on the value of the variable ICS_OR_LBCS, which should be defined -# in the environment (when calling this script from a rocoto workflow, -# the workflow should define this variable, e.g. using rocoto's -# tag). 
-# -# Note that when we refer to ICs, we are referring to not only the atmospheric -# fields at the initial time but also various surface fields (which are -# for now time-independent) as well as the 0-th forecast hour LBCs. Also, -# when we refer to LBCs, we are referring to the LBCs excluding the one -# at the 0-th hour. +# The J-Job script for getting the model files that will be used for +# either initial conditions or lateral boundary conditions for the +# experiment. +# +# Run-time environment variables: +# +# CDATE +# COMIN +# cyc +# DATA +# EXTRN_MDL_STAGING_DIR +# GLOBAL_VAR_DEFNS_FP +# ICS_OR_LBCS +# PDY +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# workflow: +# EXPTDIR +# +# task_get_extrn_lbcs: +# EXTRN_MDL_FILES_LBCS +# EXTRN_MDL_LBCS_OFFSET_HRS +# EXTRN_MDL_NAME_LBCS +# EXTRN_MDL_SOURCE_BASEDIR_LBCS +# EXTRN_MDL_SYSBASEDIR_LBCS +# FV3GFS_FILE_FMT_LBCS +# LBC_SPEC_INTVL_HRS +# USE_USER_STAGED_EXTRN_FILES +# +# task_get_extrn_ics: +# EXTRN_MDL_FILES_ICS +# EXTRN_MDL_ICS_OFFSET_HRS +# EXTRN_MDL_NAME_ICS +# EXTRN_MDL_SOURCE_BASEDIR_ICS +# EXTRN_MDL_SYSBASEDIR_ICS +# FV3GFS_FILE_FMT_ICS +# USE_USER_STAGED_EXTRN_FILES # #----------------------------------------------------------------------- # @@ -29,8 +57,12 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh "TRUE" +for sect in user nco workflow task_get_extrn_lbcs task_get_extrn_ics ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done +. $USHdir/job_preamble.sh + + # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_GET_VERIF_OBS b/jobs/JREGIONAL_GET_VERIF_OBS index 3820a739db..7c083e96c6 100755 --- a/jobs/JREGIONAL_GET_VERIF_OBS +++ b/jobs/JREGIONAL_GET_VERIF_OBS @@ -3,7 +3,19 @@ # #----------------------------------------------------------------------- # -# This script checks, pulls, and stages observation data for model verification. +# The J-Job script that checks, pulls, and stages observation data for +# model verification. +# +# Run-time environment variables: +# +# CDATE +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir # #----------------------------------------------------------------------- # @@ -16,7 +28,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task " " ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_INTEGRATION_TEST b/jobs/JREGIONAL_INTEGRATION_TEST index cbb93e86cf..983981ecf3 100755 --- a/jobs/JREGIONAL_INTEGRATION_TEST +++ b/jobs/JREGIONAL_INTEGRATION_TEST @@ -1,5 +1,31 @@ #!/bin/bash + +# +#----------------------------------------------------------------------- +# +# This J-Job script runs a set of tests at the end of WE2E tests. 
+# +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# CDATE +# FCST_DIR +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# RUN_ENV +# SCRIPTSdir +# USHdir +# +# workflow: +# FCST_LEN_HRS +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,8 +34,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_integration_test|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh + # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID index 8d65540d1c..01484041e9 100755 --- a/jobs/JREGIONAL_MAKE_GRID +++ b/jobs/JREGIONAL_MAKE_GRID @@ -3,97 +3,25 @@ # #----------------------------------------------------------------------- # -# This script generates grid and orography files in NetCDF format that -# are required as inputs for running the FV3-LAM model (i.e. the FV3 mo- -# del on a regional domain). It in turn calls three other scripts whose -# file names are specified in the variables grid_gen_scr, orog_gen_scr, -# and orog_fltr_scr and then calls the executable defined in the varia- -# ble shave_exec. These scripts/executable perform the following tasks: -# -# 1) grid_gen_scr: -# -# This script generates grid files that will be used by subsequent -# preprocessing steps. It places its output in the directory defined -# by GRID_DIR. Note that: -# -# a) This script creates grid files for each of the 7 tiles of the -# cubed sphere grid (where tiles 1 through 6 cover the globe, and -# tile 7 is the regional grid located somewhere within tile 6) -# even though the forecast will be performed only on tile 7. -# -# b) The tile 7 grid file that this script creates includes a halo, -# i.e. a layer of cells beyond the boundary of tile 7). The width -# of this halo (i.e. the number of cells in the halo in the direc- -# tion perpendicular to the boundary of the tile) must be made -# large enough such that the "shave" steps later below (which take -# this file as input and generate grid files with thinner halos) -# have a wide enough starting halo to work with. More specifical- -# ly, the FV3-LAM model needs as inputs two grid files: one with a -# halo that is 3 cells and another with a halo that is 4 cells -# wide. Thus, the halo in the grid file that the grid_gen_scr -# script generates must be greater than 4 since otherwise, the -# shave steps would shave off cells from within the interior of -# tile 7. We will let NHW denote the width of the halo in the -# grid file generated by grid_gen_scr. The "n" in this variable -# name denotes number of cells, the "h" is used to indicate that -# it refers to a halo region, the "w" is used to indicate that it -# refers to a wide halo (i.e. wider than the 3-cell and 4-cell ha- -# los that the FV3-LAM model requires as inputs, and the "T7" is -# used to indicate that the cell count is on tile 7. -# -# 2) orog_gen_scr: -# -# This script generates the orography file. It places its output in -# the directory defined by OROG_DIR. Note that: -# -# a) This script generates an orography file only on tile 7. -# -# b) This orography file contains a halo of the same width (NHW) -# as the grid file for tile 7 generated by the grid_gen_scr script -# in the previous step. 
-# -# 3) orog_fltr_scr: -# -# This script generates a filtered version of the orography file ge- -# nerated by the script orog_gen_scr. This script places its output -# in the temporary directory defined in WORKDIR_FLTR. Note that: -# -# a) The filtered orography file generated by this script contains a -# halo of the same width (NHW) as the (unfiltered) orography file -# generated by script orog_gen_scr (and the grid file generated by -# grid_gen_scr). -# -# b) In analogy with the input grid files, the FV3-LAM model needs as -# input two (filtered) orography files -- one with no halo cells -# and another with 3. These are obtained later below by "shaving" -# off layers of halo cells from the (filtered) orography file ge- -# nerated in this step. -# -# 4) shave_exec: -# -# This "shave" executable is called 4 times to generate 4 files from -# the tile 7 grid file generated by grid_gen_scr and the tile 7 fil- -# tered orography file generated by orog_fltr_scr (both of which have -# a halo of width NHW cells). The 4 output files are placed in the -# temporary directory defined in WORKDIR_SHVE. More specifically: -# -# a) shave_exec is called to shave the halo in the tile 7 grid file -# generated by grid_gen_scr down to a width of 3 cells and store -# the result in a new grid file in WORKDIR_SHVE. -# -# b) shave_exec is called to shave the halo in the tile 7 grid file -# generated by grid_gen_scr down to a width of 4 cells and store -# the result in a new grid file in WORKDIR_SHVE. -# -# c) shave_exec is called to shave the halo in the tile 7 filtered -# orography file generated by orog_fltr_scr down to a width of 0 -# cells (i.e. no halo) and store the result in a new filtered oro- -# graphy file in WORKDIR_SHVE. -# -# d) shave_exec is called to shave the halo in the tile 7 filtered -# orography file generated by orog_fltr_scr down to a width of 4 -# cells and store the result in a new filtered orography file in -# WORKDIR_SHVE. +# The J-Job that generates input NetCDF grid files for running the +# regional configuration of FV3 +# +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# DATA +# +# Experiment variables +# +# user: +# USHdir +# SCRIPTSdir +# +# workflow: +# PREEXISTING_DIR_METHOD +# +# task_make_grid: +# GRID_DIR # #----------------------------------------------------------------------- # @@ -106,7 +34,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_grid" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow task_make_grid ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . 
$USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_ICS b/jobs/JREGIONAL_MAKE_ICS index c4fb429f1b..10a3b36fb7 100755 --- a/jobs/JREGIONAL_MAKE_ICS +++ b/jobs/JREGIONAL_MAKE_ICS @@ -1,5 +1,31 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The J-job to run chgres_cube for preparing initial conditions for the +# FV3 forecast +# +# Run-time environment variables: +# +# COMIN +# DATA +# GLOBAL_VAR_DEFNS_FP +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# RUN_ENVIR +# SCRIPTSdir +# USHdir +# +# workflow: +# EXPTDIR +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +34,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_ics" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_LBCS b/jobs/JREGIONAL_MAKE_LBCS index 81e2578fd4..91d9d3edbe 100755 --- a/jobs/JREGIONAL_MAKE_LBCS +++ b/jobs/JREGIONAL_MAKE_LBCS @@ -1,5 +1,29 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The J-job to run chgres_cube for preparing lateral boundary conditions +# for the FV3 forecast +# +# Run-time environment variables: +# +# CDATE +# COMIN +# DATA +# GLOBAL_VAR_DEFNS_FP +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# RUN_ENVIR +# SCRIPTSdir +# USHdir +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +32,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_lbcs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_OROG b/jobs/JREGIONAL_MAKE_OROG index b6f674e5ee..28e2f965a5 100755 --- a/jobs/JREGIONAL_MAKE_OROG +++ b/jobs/JREGIONAL_MAKE_OROG @@ -1,5 +1,27 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The J-Job that generates input NetCDF orography files for running the +# regional configuration of FV3 +# +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# task_make_orog: +# OROG_DIR +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +30,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_orog" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow task_make_orog ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . 
$USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_SFC_CLIMO b/jobs/JREGIONAL_MAKE_SFC_CLIMO index 7cbd0cc23e..30b2d2c346 100755 --- a/jobs/JREGIONAL_MAKE_SFC_CLIMO +++ b/jobs/JREGIONAL_MAKE_SFC_CLIMO @@ -1,5 +1,30 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The J-job that generates surface climatology files for the regional +# configuration of FV3 +# +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# workflow: +# PREEXISTING_DIR_METHOD +# +# task_make_sfc_climo: +# SFC_CLIMO_DIR +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +33,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_sfc_climo" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow task_make_sfc_climo ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_PLOT_ALLVARS b/jobs/JREGIONAL_PLOT_ALLVARS index 5e59abd93d..be5ee10f82 100755 --- a/jobs/JREGIONAL_PLOT_ALLVARS +++ b/jobs/JREGIONAL_PLOT_ALLVARS @@ -1,5 +1,45 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The J-job to plot the forecast output +# +# Run-time environment variables: +# +# CDATE +# COMOUT +# GLOBAL_VAR_DEFNS_FP +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# RUN_ENVIR +# SCRIPTSdir +# USHdir +# +# platform: +# FIXshp +# +# workflow: +# EXPT_SUBDIR +# PREEXISTING_DIR_METHOD +# PREDEF_GRID_NAME +# +# task_plot_allvars: +# COMOUT_REF +# PLOT_DOMAINS +# PLOT_FCST_END +# PLOT_FCST_INC +# PLOT_FCST_START +# +# task_run_fcst: +# FCST_LEN_HRS +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +48,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_plot_allvars|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow task_plot_allvars task_run_fcst ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- @@ -67,6 +109,11 @@ COMOUT_REF=$(eval echo ${COMOUT_REF}) #----------------------------------------------------------------------- # +if [ -n "${SRW_GRAPHICS_ENV:-}" ] ; then + set +u + conda activate ${SRW_GRAPHICS_ENV} + set -u +fi # plot all variables $SCRIPTSdir/exregional_plot_allvars.py \ --cycle ${CDATE} \ diff --git a/jobs/JREGIONAL_RUN_FCST b/jobs/JREGIONAL_RUN_FCST index 45f826c0d7..2542ab32f8 100755 --- a/jobs/JREGIONAL_RUN_FCST +++ b/jobs/JREGIONAL_RUN_FCST @@ -3,9 +3,24 @@ # #----------------------------------------------------------------------- # -# This script copies files from various directories into the experiment -# directory, creates links to some of them, and modifies others (e.g. -# templates) to customize them for the current experiment setup.
+# The J-Job that runs the forecast +# +# Run-time environment variables: +# +# CDATE +# COMIN +# DATA +# GLOBAL_VAR_DEFNS_FP +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# workflow: +# RUN_ENVIR # #----------------------------------------------------------------------- # @@ -18,7 +33,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh "TRUE" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT b/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT index 707697b5ab..c7aee12df1 100755 --- a/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT +++ b/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT @@ -3,7 +3,18 @@ # #----------------------------------------------------------------------- # +# The J-Job that runs either METplus's gen_ens_prod tool or its +# ensemble_stat tool for ensemble verification. # +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir # #----------------------------------------------------------------------- # @@ -16,7 +27,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid|task_run_vx_enspoint" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX index 0301e9946a..e1207e0a81 100755 --- a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX +++ b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX @@ -3,8 +3,18 @@ # #----------------------------------------------------------------------- # -# This script runs the METplus GridStat or PointStat tool for deterministic -# verification. +# This script runs the METplus GridStat or PointStat tool for +# deterministic verification. +# +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir # #----------------------------------------------------------------------- # @@ -17,7 +27,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_gridstat" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN index ab08320f33..29b22502a4 100755 --- a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN +++ b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN @@ -3,7 +3,19 @@ # #----------------------------------------------------------------------- # +# The J-Job that runs MET/METplus's GridStat or PointStat tool to +# perform verification on the ensemble mean of a specified field (or +# group of fields). # +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir # #----------------------------------------------------------------------- # @@ -16,7 +28,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid_mean" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done
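+# Each pass of the loop above exports the variables defined in one +# section (user, nco, workflow, ...) of the experiment's variable +# definitions file, replacing the old single source_config_for_task +# call. As an illustrative sketch only (the actual implementation +# lives in ush/source_util_funcs.sh and may differ), source_yaml +# behaves roughly like: +# +#   source_yaml() {   # sketch; assumes PyYAML is available +#     local file="$1" sect="$2" +#     eval "$(python3 -c "import yaml; [print(f'export {k}={v!r}') for k, v in yaml.safe_load(open('$file'))['$sect'].items()]")" +#   }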
. $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB index 7da98212ac..731cf575a5 100755 --- a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB +++ b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB @@ -3,7 +3,19 @@ # #----------------------------------------------------------------------- # +# The J-Job that runs METplus's GridStat or PointStat tool to perform +# verification on the ensemble frequencies/probabilities of a specified +# field (or group of fields). # +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir # #----------------------------------------------------------------------- # @@ -16,7 +28,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid_prob" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_MET_PB2NC_OBS b/jobs/JREGIONAL_RUN_MET_PB2NC_OBS index 2767ae1146..89c9bb73f4 100755 --- a/jobs/JREGIONAL_RUN_MET_PB2NC_OBS +++ b/jobs/JREGIONAL_RUN_MET_PB2NC_OBS @@ -4,6 +4,18 @@ #----------------------------------------------------------------------- # # +# The J-Job that runs the METplus Pb2nc tool to convert prepBUFR +# observation files to NetCDF, by initialization time for all forecast +# hours. +# +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir # #----------------------------------------------------------------------- # @@ -16,7 +28,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pb2nc_obs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_MET_PCPCOMBINE b/jobs/JREGIONAL_RUN_MET_PCPCOMBINE index 7364ed96c9..8ac29887e8 100755 --- a/jobs/JREGIONAL_RUN_MET_PCPCOMBINE +++ b/jobs/JREGIONAL_RUN_MET_PCPCOMBINE @@ -3,7 +3,20 @@ # #----------------------------------------------------------------------- # +# The J-job that runs the MET/METplus PcpCombine tool on hourly +# accumulated precipitation (APCP) data to obtain APCP for multi-hour +# accumulation periods. The data can be from CCPA observations or a +# forecast. # +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir # #----------------------------------------------------------------------- # @@ -16,7 +29,9 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pcpcombine" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST index 692b3ae65d..58c469fc6d 100755 --- a/jobs/JREGIONAL_RUN_POST +++ b/jobs/JREGIONAL_RUN_POST @@ -3,8 +3,38 @@ # #----------------------------------------------------------------------- # -# This script runs the post-processor (UPP) on the NetCDF output files -# of the write component of the FV3-LAM model. +# The J-Job that runs the Unified Post-processor (UPP) on the NetCDF +# output from FV3. +# +# Run-time environment variables: +# +# COMIN +# COMOUT +# cyc +# DATA +# DATAROOT +# GLOBAL_VAR_DEFNS_FP +# PDY +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# platform: +# WORKFLOW_MANAGER +# +# workflow: +# DATE_FIRST_CYCL +# FCST_LEN_CYCL +# FCST_LEN_HRS +# INCR_CYCL_FREQ +# RUN_ENVIR +# +# task_run_post: +# SUB_HOURLY_POST # #----------------------------------------------------------------------- # @@ -17,7 +47,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_post|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- @@ -60,7 +92,7 @@ on the output files corresponding to a specified forecast hour. # minutes (fmn) are set to "00". This is necessary in order to pass # "fmn" into the post ex-script for the calculation of post_time. # -if [ "${SUB_HOURLY_POST}" != "TRUE" ]; then +if [ $(boolify "${SUB_HOURLY_POST}") != "TRUE" ]; then export fmn="00" fi # @@ -88,7 +120,7 @@ if [ "${RUN_ENVIR}" = "community" ]; then mkdir -p "${COMOUT}" fi -if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then +if [ $(boolify "${SUB_HOURLY_POST}") = "TRUE" ]; then export DATA_FHR="${DATA:-$COMOUT}/$fhr$fmn" else export DATA_FHR="${DATA:-$COMOUT}/$fhr" diff --git a/jobs/JREGIONAL_RUN_PRDGEN b/jobs/JREGIONAL_RUN_PRDGEN index 24479cb62d..1cf933b666 100755 --- a/jobs/JREGIONAL_RUN_PRDGEN +++ b/jobs/JREGIONAL_RUN_PRDGEN @@ -3,10 +3,33 @@ # #----------------------------------------------------------------------- # -# This script runs wgrib2 to create various subdomain GRIB2 files from -# the raw UPP-generated GRIB2 output from the run_post task of the +# The J-Job that runs wgrib2 to create various subdomain GRIB2 files +# from the raw UPP-generated GRIB2 output from the run_post task of the # FV3-LAM model. # +# Run-time environment variables: +# +# COMIN +# COMOUT +# DATA +# GLOBAL_VAR_DEFNS_FP +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# platform: +# WORKFLOW_MANAGER +# +# workflow: +# RUN_ENVIR +# +# task_run_post: +# SUB_HOURLY_POST +# #----------------------------------------------------------------------- # @@ -18,7 +41,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_prdgen|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . 
$USHdir/job_preamble.sh # #----------------------------------------------------------------------- @@ -87,7 +112,7 @@ fi mkdir -p "${COMOUT}" # subhourly post -if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then +if [ $(boolify "${SUB_HOURLY_POST}") = "TRUE" ]; then export DATA_FHR="${DATA:-$COMOUT}/$fhr$fmn" else export DATA_FHR="${DATA:-$COMOUT}/$fhr" diff --git a/jobs/JSRW_AQM_ICS b/jobs/JSRW_AQM_ICS index 0c4df8aa5b..5d5f6d970e 100755 --- a/jobs/JSRW_AQM_ICS +++ b/jobs/JSRW_AQM_ICS @@ -31,7 +31,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -130,7 +132,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_AQM_LBCS b/jobs/JSRW_AQM_LBCS index 11a1420d5e..9279dbe190 100755 --- a/jobs/JSRW_AQM_LBCS +++ b/jobs/JSRW_AQM_LBCS @@ -31,7 +31,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm task_get_extrn_lbcs \ + task_make_orog task_make_lbcs ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -131,7 +134,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_BIAS_CORRECTION_O3 b/jobs/JSRW_BIAS_CORRECTION_O3 index 3ab2f2d40f..0849614840 100755 --- a/jobs/JSRW_BIAS_CORRECTION_O3 +++ b/jobs/JSRW_BIAS_CORRECTION_O3 @@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm task_run_post \ + task_bias_correction_o3 ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -123,7 +126,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_BIAS_CORRECTION_PM25 b/jobs/JSRW_BIAS_CORRECTION_PM25 index 42210e7f29..a0a7f76dad 100755 --- a/jobs/JSRW_BIAS_CORRECTION_PM25 +++ b/jobs/JSRW_BIAS_CORRECTION_PM25 @@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm task_run_post \ + task_bias_correction_pm25 ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -123,7 +126,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_FIRE_EMISSION b/jobs/JSRW_FIRE_EMISSION index ae0343e60e..8a2b581274 100755 --- a/jobs/JSRW_FIRE_EMISSION +++ b/jobs/JSRW_FIRE_EMISSION @@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -126,7 +128,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_NEXUS_EMISSION b/jobs/JSRW_NEXUS_EMISSION index 33f1aca757..aab5869cff 100755 --- a/jobs/JSRW_NEXUS_EMISSION +++ b/jobs/JSRW_NEXUS_EMISSION @@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "task_run_fcst|cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm task_nexus_emission ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -128,7 +130,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_NEXUS_GFS_SFC b/jobs/JSRW_NEXUS_GFS_SFC index 89d84c740d..ceed6be32a 100755 --- a/jobs/JSRW_NEXUS_GFS_SFC +++ b/jobs/JSRW_NEXUS_GFS_SFC @@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -128,7 +130,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= @@ -143,7 +145,7 @@ fi if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! 
-z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_NEXUS_POST_SPLIT b/jobs/JSRW_NEXUS_POST_SPLIT index 6e5a0a259a..10f4101d5c 100755 --- a/jobs/JSRW_NEXUS_POST_SPLIT +++ b/jobs/JSRW_NEXUS_POST_SPLIT @@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -128,7 +130,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_POINT_SOURCE b/jobs/JSRW_POINT_SOURCE index a112a2d275..6218acaa99 100755 --- a/jobs/JSRW_POINT_SOURCE +++ b/jobs/JSRW_POINT_SOURCE @@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm task_point_source \ + task_run_fcst ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -123,7 +126,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_POST_STAT_O3 b/jobs/JSRW_POST_STAT_O3 index 8924cba9e5..5fadd70d30 100755 --- a/jobs/JSRW_POST_STAT_O3 +++ b/jobs/JSRW_POST_STAT_O3 @@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- @@ -124,7 +127,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_POST_STAT_PM25 b/jobs/JSRW_POST_STAT_PM25 index 83434fa8c7..2d7d6e9e88 100755 --- a/jobs/JSRW_POST_STAT_PM25 +++ b/jobs/JSRW_POST_STAT_PM25 @@ -30,7 +30,10 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -123,7 +126,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/jobs/JSRW_PRE_POST_STAT b/jobs/JSRW_PRE_POST_STAT index 12561085c2..8c51e18510 100755 --- a/jobs/JSRW_PRE_POST_STAT +++ b/jobs/JSRW_PRE_POST_STAT @@ -30,7 +30,9 @@ export SCRIPTSsrw="${HOMEdir}/scripts" # export USHdir="${USHsrw}" # should be removed later . ${USHsrw}/source_util_funcs.sh -source_config_for_task "task_pre_post_stat" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -127,7 +129,7 @@ setpdy.sh if [ ${subcyc} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" fi -if [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= diff --git a/modulefiles/tasks/cheyenne/plot_allvars.local.lua b/modulefiles/tasks/cheyenne/plot_allvars.local.lua index b49b8bb863..7cee04231e 100644 --- a/modulefiles/tasks/cheyenne/plot_allvars.local.lua +++ b/modulefiles/tasks/cheyenne/plot_allvars.local.lua @@ -1,3 +1,3 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/derecho/plot_allvars.local.lua b/modulefiles/tasks/derecho/plot_allvars.local.lua index b49b8bb863..7cee04231e 100644 --- a/modulefiles/tasks/derecho/plot_allvars.local.lua +++ b/modulefiles/tasks/derecho/plot_allvars.local.lua @@ -1,3 +1,3 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/gaea/plot_allvars.local.lua b/modulefiles/tasks/gaea/plot_allvars.local.lua index 104da06f5c..41da34ecca 100644 --- a/modulefiles/tasks/gaea/plot_allvars.local.lua +++ b/modulefiles/tasks/gaea/plot_allvars.local.lua @@ -1,4 +1,4 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/hera/plot_allvars.local.lua b/modulefiles/tasks/hera/plot_allvars.local.lua index b7e9528710..85291013c7 100644 --- a/modulefiles/tasks/hera/plot_allvars.local.lua +++ b/modulefiles/tasks/hera/plot_allvars.local.lua @@ -1,2 +1,2 @@ load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/hercules/plot_allvars.local.lua b/modulefiles/tasks/hercules/plot_allvars.local.lua index b49b8bb863..7cee04231e 100644 --- a/modulefiles/tasks/hercules/plot_allvars.local.lua +++ b/modulefiles/tasks/hercules/plot_allvars.local.lua @@ -1,3 +1,3 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/jet/plot_allvars.local.lua b/modulefiles/tasks/jet/plot_allvars.local.lua index 
b7e9528710..85291013c7 100644 --- a/modulefiles/tasks/jet/plot_allvars.local.lua +++ b/modulefiles/tasks/jet/plot_allvars.local.lua @@ -1,2 +1,2 @@ load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/noaacloud/plot_allvars.local.lua b/modulefiles/tasks/noaacloud/plot_allvars.local.lua index b7e9528710..2fd9b41eb5 100644 --- a/modulefiles/tasks/noaacloud/plot_allvars.local.lua +++ b/modulefiles/tasks/noaacloud/plot_allvars.local.lua @@ -1,2 +1,5 @@ -load("conda") -setenv("SRW_ENV", "srw_graphics") +unload("python") +append_path("MODULEPATH","/contrib/EPIC/miniconda3/modulefiles") +load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) + +setenv("SRW_GRAPHICS_ENV", "regional_workflow") diff --git a/modulefiles/tasks/orion/plot_allvars.local.lua b/modulefiles/tasks/orion/plot_allvars.local.lua index b49b8bb863..7cee04231e 100644 --- a/modulefiles/tasks/orion/plot_allvars.local.lua +++ b/modulefiles/tasks/orion/plot_allvars.local.lua @@ -1,3 +1,3 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/parm/wflow/aqm_post.yaml b/parm/wflow/aqm_post.yaml index 5f307184d3..48a0761fef 100644 --- a/parm/wflow/aqm_post.yaml +++ b/parm/wflow/aqm_post.yaml @@ -22,7 +22,7 @@ default_aqm_task: &default_aqm task_pre_post_stat: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "pre_post_stat" "&HOMEdir;/jobs/JSRW_PRE_POST_STAT"' + command: '&LOAD_MODULES_RUN_TASK; "pre_post_stat" "&HOMEdir;/jobs/JSRW_PRE_POST_STAT"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: or: @@ -36,7 +36,7 @@ task_pre_post_stat: task_post_stat_o3: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_o3" "&HOMEdir;/jobs/JSRW_POST_STAT_O3"' + command: '&LOAD_MODULES_RUN_TASK; "post_stat_o3" "&HOMEdir;/jobs/JSRW_POST_STAT_O3"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: @@ -46,7 +46,7 @@ task_post_stat_o3: task_post_stat_pm25: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_pm25" "&HOMEdir;/jobs/JSRW_POST_STAT_PM25"' + command: '&LOAD_MODULES_RUN_TASK; "post_stat_pm25" "&HOMEdir;/jobs/JSRW_POST_STAT_PM25"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: @@ -56,7 +56,7 @@ task_post_stat_pm25: task_bias_correction_o3: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_o3" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_O3"' + command: '&LOAD_MODULES_RUN_TASK; "bias_correction_o3" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_O3"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: @@ -66,7 +66,7 @@ task_bias_correction_o3: task_bias_correction_pm25: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_pm25" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_PM25"' + command: '&LOAD_MODULES_RUN_TASK; "bias_correction_pm25" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_PM25"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: diff --git a/parm/wflow/aqm_prep.yaml b/parm/wflow/aqm_prep.yaml index c57d2198f0..d90bbde60f 100644 --- a/parm/wflow/aqm_prep.yaml +++ b/parm/wflow/aqm_prep.yaml @@ -29,7 +29,7 @@ default_aqm_task: &default_aqm task_nexus_gfs_sfc: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_gfs_sfc" "&HOMEdir;/jobs/JSRW_NEXUS_GFS_SFC"' + command: '&LOAD_MODULES_RUN_TASK; "nexus_gfs_sfc" "&HOMEdir;/jobs/JSRW_NEXUS_GFS_SFC"' native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") 
%}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}' partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' @@ -53,7 +53,7 @@ metatask_nexus_emission: nspt: '{% for h in range(0, cpl_aqm_parm.NUM_SPLIT_NEXUS) %}{{ " %02d" % h }}{% endfor %}' task_nexus_emission_#nspt#: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_emission" "&HOMEdir;/jobs/JSRW_NEXUS_EMISSION"' + command: '&LOAD_MODULES_RUN_TASK; "nexus_emission" "&HOMEdir;/jobs/JSRW_NEXUS_EMISSION"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' nnodes: '{{ task_nexus_emission.NNODES_NEXUS_EMISSION }}' ppn: '{{ task_nexus_emission.PPN_NEXUS_EMISSION // 1 }}' @@ -68,7 +68,7 @@ metatask_nexus_emission: task_nexus_post_split: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_post_split" "&HOMEdir;/jobs/JSRW_NEXUS_POST_SPLIT"' + command: '&LOAD_MODULES_RUN_TASK; "nexus_post_split" "&HOMEdir;/jobs/JSRW_NEXUS_POST_SPLIT"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: metataskdep: @@ -77,13 +77,13 @@ task_nexus_post_split: task_fire_emission: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "fire_emission" "&HOMEdir;/jobs/JSRW_FIRE_EMISSION"' + command: '&LOAD_MODULES_RUN_TASK; "fire_emission" "&HOMEdir;/jobs/JSRW_FIRE_EMISSION"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 2G task_point_source: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "point_source" "&HOMEdir;/jobs/JSRW_POINT_SOURCE"' + command: '&LOAD_MODULES_RUN_TASK; "point_source" "&HOMEdir;/jobs/JSRW_POINT_SOURCE"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' walltime: 01:00:00 dependency: @@ -101,7 +101,7 @@ task_aqm_ics_ext: attrs: cycledefs: at_start maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"' + command: '&LOAD_MODULES_RUN_TASK; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"' envars: <<: *default_vars PREV_CYCLE_DIR: '&WARMSTART_CYCLE_DIR;' @@ -127,7 +127,7 @@ task_aqm_ics: attrs: cycledefs: cycled_from_second maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"' + command: '&LOAD_MODULES_RUN_TASK; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"' envars: <<: *default_vars PREV_CYCLE_DIR: '&COMIN_DIR;' @@ -150,7 +150,7 @@ task_aqm_ics: task_aqm_lbcs: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_lbcs" "&HOMEdir;/jobs/JSRW_AQM_LBCS"' + command: '&LOAD_MODULES_RUN_TASK; "aqm_lbcs" "&HOMEdir;/jobs/JSRW_AQM_LBCS"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' ppn: 24 dependency: diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml index ceefe865e6..6fad0b8d83 100644 --- a/parm/wflow/coldstart.yaml +++ b/parm/wflow/coldstart.yaml @@ -20,7 +20,7 @@ default_task: &default_task task_get_extrn_ics: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "get_extrn_ics" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' + command: '&LOAD_MODULES_RUN_TASK; "get_extrn_ics" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' attrs: cycledefs: forecast maxtries: '2' @@ -51,7 +51,7 @@ task_get_extrn_ics: task_get_extrn_lbcs: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "get_extrn_lbcs" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' + command: '&LOAD_MODULES_RUN_TASK; "get_extrn_lbcs" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' attrs: cycledefs: forecast maxtries: '2' @@ -85,7 +85,7 @@ metatask_run_ensemble: mem: '{% if global.DO_ENSEMBLE %}{%- for m in 
range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' task_make_ics_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_ics" "&JOBSdir;/JREGIONAL_MAKE_ICS"' + command: '&LOAD_MODULES_RUN_TASK; "make_ics" "&JOBSdir;/JREGIONAL_MAKE_ICS"' envars: <<: *default_vars SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' @@ -124,7 +124,7 @@ metatask_run_ensemble: task_make_lbcs_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_lbcs" "&JOBSdir;/JREGIONAL_MAKE_LBCS"' + command: '&LOAD_MODULES_RUN_TASK; "make_lbcs" "&JOBSdir;/JREGIONAL_MAKE_LBCS"' envars: <<: *default_vars SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' @@ -142,7 +142,7 @@ metatask_run_ensemble: task_run_fcst_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "run_fcst" "&JOBSdir;/JREGIONAL_RUN_FCST"' + command: '&LOAD_MODULES_RUN_TASK; "run_fcst" "&JOBSdir;/JREGIONAL_RUN_FCST"' envars: <<: *default_vars SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' diff --git a/parm/wflow/default_workflow.yaml b/parm/wflow/default_workflow.yaml index c79415b3be..e37fdae1ea 100644 --- a/parm/wflow/default_workflow.yaml +++ b/parm/wflow/default_workflow.yaml @@ -11,7 +11,7 @@ rocoto: HOMEdir: '{{ user.HOMEdir }}' JOBSdir: '{{ user.JOBSdir }}' KEEPDATA: '{{ nco.KEEPDATA_default }}' - LOAD_MODULES_RUN_TASK_FP: '{{ workflow.LOAD_MODULES_RUN_TASK_FP }}' + LOAD_MODULES_RUN_TASK: '{{ workflow.LOAD_MODULES_RUN_TASK_FP }} {{ user.MACHINE }}' LOGEXT: ".log" NET: '{{ nco.NET_default }}' MRMS_OBS_DIR: '{{ platform.MRMS_OBS_DIR }}' diff --git a/parm/wflow/plot.yaml b/parm/wflow/plot.yaml index 6dad3e0dfa..445d238c15 100644 --- a/parm/wflow/plot.yaml +++ b/parm/wflow/plot.yaml @@ -26,7 +26,7 @@ default_task_plot: &default_task task_plot_allvars: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "plot_allvars" "&JOBSdir;/JREGIONAL_PLOT_ALLVARS"' + command: '&LOAD_MODULES_RUN_TASK; "plot_allvars" "&JOBSdir;/JREGIONAL_PLOT_ALLVARS"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: or_do_post: &post_files_exist diff --git a/parm/wflow/post.yaml b/parm/wflow/post.yaml index 5672e7343f..114e5de377 100644 --- a/parm/wflow/post.yaml +++ b/parm/wflow/post.yaml @@ -3,7 +3,7 @@ default_task_post: &default_task attrs: cycledefs: '#cycledef#' maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_post" "&JOBSdir;/JREGIONAL_RUN_POST"' + command: '&LOAD_MODULES_RUN_TASK; "run_post" "&JOBSdir;/JREGIONAL_RUN_POST"' envars: &default_vars GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' USHdir: '&USHdir;' diff --git a/parm/wflow/prdgen.yaml b/parm/wflow/prdgen.yaml index 6b9f7cd4f6..3f2026a45f 100644 --- a/parm/wflow/prdgen.yaml +++ b/parm/wflow/prdgen.yaml @@ -10,7 +10,7 @@ metatask_run_prdgen: attrs: cycledefs: '#cycledef#' maxtries: 1 - command: '&LOAD_MODULES_RUN_TASK_FP; "run_prdgen" "&JOBSdir;/JREGIONAL_RUN_PRDGEN"' + command: '&LOAD_MODULES_RUN_TASK; "run_prdgen" "&JOBSdir;/JREGIONAL_RUN_PRDGEN"' envars: GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' USHdir: '&USHdir;' diff --git a/parm/wflow/prep.yaml b/parm/wflow/prep.yaml index c9d5549909..a0c6e3119a 100644 --- a/parm/wflow/prep.yaml +++ b/parm/wflow/prep.yaml @@ -24,12 +24,12 @@ default_task_prep: &default_task task_make_grid: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_grid" "&JOBSdir;/JREGIONAL_MAKE_GRID"' + command: '&LOAD_MODULES_RUN_TASK; "make_grid" "&JOBSdir;/JREGIONAL_MAKE_GRID"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' task_make_orog: <<: *default_task - 
command: '&LOAD_MODULES_RUN_TASK_FP; "make_orog" "&JOBSdir;/JREGIONAL_MAKE_OROG"' + command: '&LOAD_MODULES_RUN_TASK; "make_orog" "&JOBSdir;/JREGIONAL_MAKE_OROG"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: or: &make_grid_satisfied @@ -47,7 +47,7 @@ task_make_orog: task_make_sfc_climo: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_sfc_climo" "&JOBSdir;/JREGIONAL_MAKE_SFC_CLIMO"' + command: '&LOAD_MODULES_RUN_TASK; "make_sfc_climo" "&JOBSdir;/JREGIONAL_MAKE_SFC_CLIMO"' envars: <<: *default_envars join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' diff --git a/parm/wflow/test.yaml b/parm/wflow/test.yaml index 716665b228..9c084d6875 100644 --- a/parm/wflow/test.yaml +++ b/parm/wflow/test.yaml @@ -29,7 +29,7 @@ metatask_integration_test: mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' task_integration_test_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "integration_test" "&JOBSdir;/JREGIONAL_INTEGRATION_TEST"' + command: '&LOAD_MODULES_RUN_TASK; "integration_test" "&JOBSdir;/JREGIONAL_INTEGRATION_TEST"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: and_run_fcst: diff --git a/parm/wflow/verify_det.yaml b/parm/wflow/verify_det.yaml index e82d7c61e1..a62adb4481 100644 --- a/parm/wflow/verify_det.yaml +++ b/parm/wflow/verify_det.yaml @@ -31,7 +31,7 @@ metatask_GridStat_CCPA_all_accums_all_mems: <<: *default_task_verify_det attrs: maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' envars: <<: *default_vars OBS_DIR: '&CCPA_OBS_DIR;' @@ -63,7 +63,7 @@ metatask_GridStat_NOHRSC_all_accums_all_mems: <<: *default_task_verify_det attrs: maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' envars: <<: *default_vars OBS_DIR: '&NOHRSC_OBS_DIR;' @@ -93,7 +93,7 @@ metatask_GridStat_MRMS_all_mems: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["REFC", "RETOP"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_GridStat_vx_#VAR#_mem#mem#: <<: *default_task_verify_det - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' envars: <<: *default_vars OBS_DIR: '&MRMS_OBS_DIR;' @@ -124,7 +124,7 @@ metatask_PointStat_NDAS_all_mems: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["ADPSFC", "ADPUPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_PointStat_vx_#VAR#_mem#mem#: <<: *default_task_verify_det - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' envars: <<: *default_vars OBS_DIR: '&NDAS_OBS_DIR;' diff --git a/parm/wflow/verify_ens.yaml b/parm/wflow/verify_ens.yaml index 18b23a1eb0..71bc20b3b0 100644 --- a/parm/wflow/verify_ens.yaml +++ b/parm/wflow/verify_ens.yaml @@ -26,7 +26,7 @@ metatask_GenEnsProd_EnsembleStat_CCPA: ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' 
task_run_MET_GenEnsProd_vx_APCP#ACCUM_HH#h: &task_GenEnsProd_CCPA <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' envars: &envars_GenEnsProd_CCPA <<: *default_vars ACCUM_HH: '#ACCUM_HH#' @@ -63,7 +63,7 @@ metatask_GenEnsProd_EnsembleStat_NOHRSC: ACCUM_HH: '{% for ah in verification.VX_ASNOW_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' task_run_MET_GenEnsProd_vx_ASNOW#ACCUM_HH#h: &task_GenEnsProd_NOHRSC <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' envars: &envars_GenEnsProd_NOHRSC <<: *default_vars ACCUM_HH: '#ACCUM_HH#' @@ -101,7 +101,7 @@ metatask_GenEnsProd_EnsembleStat_MRMS: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["REFC", "RETOP"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_GenEnsProd_vx_#VAR#: &task_GenEnsProd_MRMS <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' envars: &envars_GenEnsProd_MRMS <<: *default_vars ACCUM_HH: '01' @@ -137,7 +137,7 @@ metatask_GenEnsProd_EnsembleStat_NDAS: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["ADPSFC", "ADPUPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_GenEnsProd_vx_#VAR#: &task_GenEnsProd_NDAS <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' envars: &envars_GenEnsProd_NDAS <<: *default_vars OBS_DIR: '&NDAS_OBS_DIR;' @@ -178,7 +178,7 @@ metatask_GridStat_CCPA_ensmeanprob_all_accums: ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' task_run_MET_GridStat_vx_ens#statlc#_APCP#ACCUM_HH#h: <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' envars: <<: *default_vars OBS_DIR: '&CCPA_OBS_DIR;' @@ -202,7 +202,7 @@ metatask_GridStat_NOHRSC_ensmeanprob_all_accums: ACCUM_HH: '{% for ah in verification.VX_ASNOW_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' task_run_MET_GridStat_vx_ens#statlc#_ASNOW#ACCUM_HH#h: <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' envars: <<: *default_vars OBS_DIR: '&NOHRSC_OBS_DIR;' @@ -222,7 +222,7 @@ metatask_GridStat_MRMS_ensprob: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["REFC", "RETOP"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_GridStat_vx_ensprob_#VAR#: <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB"' + command: 
'&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB"' envars: <<: *default_vars ACCUM_HH: '01' @@ -246,7 +246,7 @@ metatask_PointStat_NDAS_ensmeanprob: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["ADPSFC", "ADPUPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_PointStat_vx_ens#statlc#_#VAR#: <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' envars: <<: *default_vars OBS_DIR: '&NDAS_OBS_DIR;' diff --git a/parm/wflow/verify_pre.yaml b/parm/wflow/verify_pre.yaml index b7511bf63f..0d4e1c2448 100644 --- a/parm/wflow/verify_pre.yaml +++ b/parm/wflow/verify_pre.yaml @@ -23,7 +23,7 @@ default_task_verify_pre: &default_task_verify_pre task_get_obs_ccpa: <<: *default_task_verify_pre - command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' + command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' envars: <<: *default_vars ACCUM_HH: '01' @@ -37,7 +37,7 @@ task_get_obs_ccpa: task_get_obs_nohrsc: <<: *default_task_verify_pre - command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' + command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' envars: <<: *default_vars OBS_DIR: '&NOHRSC_OBS_DIR;' @@ -50,7 +50,7 @@ task_get_obs_nohrsc: task_get_obs_mrms: <<: *default_task_verify_pre - command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' + command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' envars: <<: *default_vars OBS_DIR: '&MRMS_OBS_DIR;' @@ -69,7 +69,7 @@ task_get_obs_ndas: OBS_DIR: '&NDAS_OBS_DIR;' OBTYPE: 'NDAS' FHR: '{% for h in range(0, workflow.FCST_LEN_HRS+1) %}{{ " %02d" % h }}{% endfor %}' - command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' + command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' queue: "&QUEUE_HPSS;" native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}' partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' @@ -80,7 +80,7 @@ task_run_MET_Pb2nc_obs: attrs: cycledefs: forecast maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PB2NC_OBS"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PB2NC_OBS"' envars: <<: *default_vars VAR: ADPSFC @@ -110,7 +110,7 @@ metatask_PcpCombine_obs: attrs: cycledefs: forecast maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' envars: <<: *default_vars VAR: APCP @@ -140,7 +140,7 @@ metatask_check_post_output_all_mems: attrs: cycledefs: forecast maxtries: '1' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_CHECK_POST_OUTPUT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_CHECK_POST_OUTPUT"' envars: <<: *default_vars VAR: APCP @@ -221,7 +221,7 @@ metatask_PcpCombine_fcst_APCP_all_accums_all_mems: attrs: cycledefs: forecast maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" 
"&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' envars: <<: *default_vars VAR: APCP @@ -249,7 +249,7 @@ metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems: attrs: cycledefs: forecast maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' envars: <<: *default_vars VAR: ASNOW diff --git a/scripts/exregional_check_post_output.sh b/scripts/exregional_check_post_output.sh index ba0d141c5d..320311cc94 100755 --- a/scripts/exregional_check_post_output.sh +++ b/scripts/exregional_check_post_output.sh @@ -1,5 +1,43 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The ex-script for checking the post output. +# +# Run-time environment variables: +# +# ACCUM_HH +# CDATE +# ENSMEM_INDX +# GLOBAL_VAR_DEFNS_FP +# VAR +# +# Experiment variables +# +# user: +# USHdir +# +# workflow: +# FCST_LEN_HRS +# +# global: +# DO_ENSEMBLE +# ENS_TIME_LAG_HRS +# +# verification: +# FCST_FN_TEMPLATE +# FCST_SUBDIR_TEMPLATE +# NUM_MISSING_FCST_FILES_MAX +# VX_FCST_INPUT_BASEDIR +# VX_NDIGITS_ENSMEM_NAMES +# +# constants: +# SECS_PER_HOUR +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +46,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pcpcombine|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow global verification constants task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -64,7 +104,7 @@ user-staged. #----------------------------------------------------------------------- # i="0" -if [ "${DO_ENSEMBLE}" = "TRUE" ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then i=$( bc -l <<< "${ENSMEM_INDX}-1" ) fi time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" ) diff --git a/scripts/exregional_get_extrn_mdl_files.sh b/scripts/exregional_get_extrn_mdl_files.sh index 018a30c285..96c3136e33 100755 --- a/scripts/exregional_get_extrn_mdl_files.sh +++ b/scripts/exregional_get_extrn_mdl_files.sh @@ -1,5 +1,65 @@ #!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# The ex-script for getting the model files that will be used for either +# initial conditions or lateral boundary conditions for the experiment. 
+# +# Run-time environment variables: +# +# CDATE +# COMIN +# cyc +# DATA +# EXTRN_MDL_CDATE +# EXTRN_MDL_NAME +# EXTRN_MDL_STAGING_DIR +# GLOBAL_VAR_DEFNS_FP +# ICS_OR_LBCS +# NET +# PDY +# TIME_OFFSET_HRS +# +# Experiment variables +# +# user: +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# EXTRN_MDL_DATA_STORES +# +# workflow: +# DATE_FIRST_CYCL +# EXTRN_MDL_VAR_DEFNS_FN +# FCST_LEN_CYCL +# INCR_CYCL_FREQ +# SYMLINK_FIX_FILES +# +# task_get_extrn_lbcs: +# EXTRN_MDL_FILES_LBCS +# EXTRN_MDL_SOURCE_BASEDIR_LBCS +# EXTRN_MDL_SYSBASEDIR_LBCS +# FV3GFS_FILE_FMT_LBCS +# LBC_SPEC_INTVL_HRS +# +# task_get_extrn_ics: +# EXTRN_MDL_FILES_ICS +# EXTRN_MDL_SOURCE_BASEDIR_ICS +# EXTRN_MDL_SYSBASEDIR_ICS +# FV3GFS_FILE_FMT_ICS +# +# global: +# DO_ENSEMBLE +# NUM_ENS_MEMBERS +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +68,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} + +for sect in user nco platform workflow global task_get_extrn_lbcs \ + task_get_extrn_ics ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -147,12 +211,12 @@ if [ -n "${input_file_path:-}" ] ; then --input_file_path ${input_file_path}" fi -if [ $SYMLINK_FIX_FILES = "TRUE" ]; then +if [ $(boolify $SYMLINK_FIX_FILES) = "TRUE" ]; then additional_flags="$additional_flags \ --symlink" fi -if [ $DO_ENSEMBLE == "TRUE" ] ; then +if [ $(boolify $DO_ENSEMBLE) = "TRUE" ] ; then mem_dir="/mem{mem:03d}" member_list=(1 ${NUM_ENS_MEMBERS}) additional_flags="$additional_flags \ @@ -222,7 +286,7 @@ if [ "${EXTRN_MDL_NAME}" = "GEFS" ]; then for num in $(seq -f "%02g" ${NUM_ENS_MEMBERS}); do sorted_fn=( ) for fcst_hr in "${all_fcst_hrs_array[@]}"; do - # Read in filenames from $EXTRN_MDL_FNS and sort them + # Read in filenames from EXTRN_MDL_FNS and sort them base_path="${EXTRN_MDL_STAGING_DIR}/mem`printf %03d $num`" filenames_array=`awk -F= '/EXTRN_MDL_FNS/{print $2}' $base_path/${EXTRN_DEFNS}` for filename in ${filenames_array[@]}; do diff --git a/scripts/exregional_get_verif_obs.sh b/scripts/exregional_get_verif_obs.sh index a74f11cd3a..6ad6aaed0e 100755 --- a/scripts/exregional_get_verif_obs.sh +++ b/scripts/exregional_get_verif_obs.sh @@ -1,5 +1,28 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The ex-script that checks, pulls, and stages observation data for +# model verification. +# +# Run-time environment variables: +# +# FHR +# GLOBAL_VAR_DEFNS_FP +# OBS_DIR +# OBTYPE +# PDY +# VAR +# +# Experiment variables +# +# user: +# USHdir +# PARMdir +# +#----------------------------------------------------------------------- + # #----------------------------------------------------------------------- # @@ -8,7 +31,9 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task " " ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_integration_test.py b/scripts/exregional_integration_test.py index f0ac3d9af6..996cf6320e 100755 --- a/scripts/exregional_integration_test.py +++ b/scripts/exregional_integration_test.py @@ -4,16 +4,16 @@ #### Python Script Documentation Block # # Script name: exregional_integration_test.py -# Script description: Ensures the correct number of netcdf files are generated +# Script description: Ensures the correct number of netcdf files are generated # for each experiment # # Author: Eddie Snyder Org: NOAA EPIC Date: 2024-02-05 -# +# # Instructions: 1. Pass the appropriate info for the required arguments: # --fcst_dir=/path/to/forecast/files # --fcst_len= # 2. Run script with arguments -# +# # Notes/future work: - Currently SRW App only accepts netcdf as the UFS WM # output file format. If that changes, then additional # logic is needed to address the other file formats. diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index c1876651d8..104875f8dc 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -1,5 +1,99 @@ #!/usr/bin/env bash + + # +#----------------------------------------------------------------------- + # +# This script generates NetCDF-formatted grid files required as input to +# the FV3 model configured for the regional domain. + # +# The output of this script is placed in a directory defined by GRID_DIR. + # +# More about the grid for regional configurations of FV3: + # +# a) This script creates grid files for tile 7 (reserved for the +# regional grid located somewhere within tile 6 of the 6 global +# tiles). + # +# b) Regional configurations of FV3 need two grid files, one with 3 +# halo cells and one with 4 halo cells. The width of the halo is +# the number of cells in the direction perpendicular to the +# boundary. + # +# c) The tile 7 grid file that this script creates includes a halo, +# with at least 4 cells to accommodate this requirement. The halo +# is made thinner in a subsequent step called "shave". + # +# d) We will let NHW denote the width of the wide halo that is wider +# than the required 3- or 4-cell halos. (NHW; N=number of cells, +# H=halo, W=wide halo) + # +# e) T7 indicates the cell count on tile 7.
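To make the halo bookkeeping concrete (illustrative numbers only): with NX=219, NY=131, and NHW=6, the wide-halo tile-7 grid spans (219+2*6) x (131+2*6) cells, since the halo wraps every side of the domain; the shave step then trims that 6-cell halo down to the required 4- and 3-cell versions.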
+# +# +# This script does the following: +# +# - Create the grid, either an ESGgrid with the regional_esg_grid +# executable or a GFDL-type grid with the hgrid executable +# - Calculate the regional grid's global uniform cubed-sphere grid +# equivalent resolution with the global_equiv_resol executable +# - Use the shave executable to reduce the halo to 3 and 4 cells +# - Call an ush script that runs the make_solo_mosaic executable +# +# Run-time environment variables: +# +# DATA +# GLOBAL_VAR_DEFNS_FP +# REDIRECT_OUT_ERR +# +# Experiment variables +# +# user: +# EXECdir +# USHdir +# +# platform: +# PRE_TASK_CMDS +# RUN_CMD_SERIAL + +# workflow: +# DOT_OR_USCORE +# GRID_GEN_METHOD +# RES_IN_FIXLAM_FILENAMES +# RGNL_GRID_NML_FN +# VERBOSE +# +# task_make_grid: +# GFDLgrid_NUM_CELLS +# GFDLgrid_USE_NUM_CELLS_IN_FILENAMES +# GRID_DIR +# +# constants: +# NH3 +# NH4 +# TILE_RGNL +# +# grid_params: +# DEL_ANGLE_X_SG +# DEL_ANGLE_Y_SG +# GFDLgrid_REFINE_RATIO +# IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG +# ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG +# JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG +# JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG +# LAT_CTR +# LON_CTR +# NEG_NX_OF_DOM_WITH_WIDE_HALO +# NEG_NY_OF_DOM_WITH_WIDE_HALO +# NHW +# NX +# NY +# PAZI +# STRETCH_FAC +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +102,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_grid" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow constants grid_params task_make_grid ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -276,6 +372,7 @@ generation executable (exec_fp): 'pazi': ${PAZI} " + # UW takes input from stdin when no -i/--input-config flag is provided (cat << EOF $settings EOF @@ -372,7 +469,7 @@ res_equiv=${res_equiv//$'\n'/} #----------------------------------------------------------------------- # if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - if [ "${GFDLgrid_USE_NUM_CELLS_IN_FILENAMES}" = "TRUE" ]; then + if [ $(boolify "${GFDLgrid_USE_NUM_CELLS_IN_FILENAMES}") = "TRUE" ]; then CRES="C${GFDLgrid_NUM_CELLS}" else CRES="C${res_equiv}" @@ -380,7 +477,15 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then elif [ "${GRID_GEN_METHOD}" = "ESGgrid" ]; then CRES="C${res_equiv}" fi -set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "'$CRES'" + + # UW takes the update values from stdin when no --update-file flag is + # provided. It needs --update-format to do it correctly, though. 
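(Put differently, the YAML fragment piped in on stdin below appears to stand in for what would otherwise be supplied through --update-file; that reading is inferred from the flags used here, not from uw documentation.)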
+echo "workflow: {CRES: ${CRES}}" | uw config realize \ + --input-file $GLOBAL_VAR_DEFNS_FP \ + --update-format yaml \ + --output-file $GLOBAL_VAR_DEFNS_FP \ + --verbose + # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index 875249b107..8cd49076b0 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -1,5 +1,83 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The ex-scrtipt that sets up and runs chgres_cube for preparing initial +# conditions for the FV3 forecast +# +# Run-time environment variables: +# +# COMIN +# COMOUT +# COMROOT +# DATA +# DATAROOT +# DATA_SHARE +# EXTRN_MDL_CDATE +# GLOBAL_VAR_DEFNS_FP +# INPUT_DATA +# NET +# PDY +# REDIRECT_OUT_ERR +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# EXECdir +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# FIXgsm +# PRE_TASK_CMDS +# RUN_CMD_UTILS +# +# workflow: +# CCPP_PHYS_SUITE +# COLDSTART +# CRES +# DATE_FIRST_CYCL +# DOT_OR_USCORE +# EXTRN_MDL_VAR_DEFNS_FN +# FIXlam +# SDF_USES_RUC_LSM +# SDF_USES_THOMPSON_MP +# THOMPSON_MP_CLIMO_FP +# VERBOSE +# +# task_make_ics: +# FVCOM_DIR +# FVCOM_FILE +# FVCOM_WCSTART +# KMP_AFFINITY_MAKE_ICS +# OMP_NUM_THREADS_MAKE_ICS +# OMP_STACKSIZE_MAKE_ICS +# USE_FVCOM +# VCOORD_FILE +# +# task_get_extrn_ics: +# EXTRN_MDL_NAME_ICS +# FV3GFS_FILE_FMT_ICS +# +# global: +# HALO_BLEND +# +# cpl_aqm_parm: +# CPL_AQM +# +# constants: +# NH0 +# NH4 +# TILE_RGNL +# +#----------------------------------------------------------------------- +# + + # #----------------------------------------------------------------------- # @@ -8,7 +86,9 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_make_ics|task_get_extrn_ics" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm constants task_get_extrn_ics task_make_ics ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -306,7 +386,7 @@ convert_nst="" nsoill_out="4" if [ "${EXTRN_MDL_NAME_ICS}" = "HRRR" -o \ "${EXTRN_MDL_NAME_ICS}" = "RAP" ] && \ - [ "${SDF_USES_RUC_LSM}" = "TRUE" ]; then + [ $(boolify "${SDF_USES_RUC_LSM}") = "TRUE" ]; then nsoill_out="9" fi # @@ -326,7 +406,7 @@ fi thomp_mp_climo_file="" if [ "${EXTRN_MDL_NAME_ICS}" != "HRRR" -a \ "${EXTRN_MDL_NAME_ICS}" != "RAP" ] && \ - [ "${SDF_USES_THOMPSON_MP}" = "TRUE" ]; then + [ $(boolify "${SDF_USES_THOMPSON_MP}") = "TRUE" ]; then thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}" fi # @@ -643,9 +723,9 @@ POST_STEP # #----------------------------------------------------------------------- # -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then COMOUT="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later - if [ "${COLDSTART}" = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then + if [ $(boolify "${COLDSTART}") = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then data_trans_path="${COMOUT}" else data_trans_path="${DATA_SHARE}" @@ -667,7 +747,7 @@ fi # #----------------------------------------------------------------------- # -if [ "${USE_FVCOM}" = "TRUE" ]; then +if [ $(boolify "${USE_FVCOM}") = "TRUE" ]; then #Format for fvcom_time: YYYY-MM-DDTHH:00:00.000000 fvcom_exec_fn="fvcom_to_FV3" diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index 5a2d24bcea..35b4da388a 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -1,5 +1,83 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The ex-script that sets up and runs chgres_cube for preparing lateral +# boundary conditions for the FV3 forecast +# +# Run-time environment variables: +# +# COMIN +# COMOUT +# COMROOT +# DATA +# DATAROOT +# DATA_SHARE +# EXTRN_MDL_CDATE +# INPUT_DATA +# GLOBAL_VAR_DEFNS_FP +# NET +# PDY +# REDIRECT_OUT_ERR +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# EXECdir +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# FIXgsm +# PRE_TASK_CMDS +# RUN_CMD_UTILS +# +# workflow: +# CCPP_PHYS_SUITE +# COLDSTART +# CRES +# DATE_FIRST_CYCL +# DOT_OR_USCORE +# EXTRN_MDL_VAR_DEFNS_FN +# FIXlam +# SDF_USES_RUC_LSM +# SDF_USES_THOMPSON_MP +# THOMPSON_MP_CLIMO_FP +# VERBOSE +# +# task_get_extrn_lbcs: +# EXTRN_MDL_NAME_LBCS +# FV3GFS_FILE_FMT_LBCS +# +# task_make_lbcs: +# FVCOM_DIR +# FVCOM_FILE +# FVCOM_WCSTART +# KMP_AFFINITY_MAKE_LBCS +# OMP_NUM_THREADS_MAKE_LBCS +# OMP_STACKSIZE_MAKE_LBCS +# USE_FVCOM +# VCOORD_FILE +# +# global: +# HALO_BLEND +# +# cpl_aqm_parm: +# CPL_AQM +# +# constants: +# NH0 +# NH4 +# TILE_RGNL +# +#----------------------------------------------------------------------- +# + + # #----------------------------------------------------------------------- # @@ -8,7 +86,10 @@ #----------------------------------------------------------------------- # .
$USHdir/source_util_funcs.sh -source_config_for_task "task_make_lbcs|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} +set -x +for sect in user nco platform workflow global cpl_aqm_parm constants task_get_extrn_lbcs task_make_lbcs ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -250,7 +331,7 @@ tracers="\"\"" thomp_mp_climo_file="" if [ "${EXTRN_MDL_NAME_LBCS}" != "HRRR" -a \ "${EXTRN_MDL_NAME_LBCS}" != "RAP" ] && \ - [ "${SDF_USES_THOMPSON_MP}" = "TRUE" ]; then + [ $(boolify "${SDF_USES_THOMPSON_MP}") = "TRUE" ]; then thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}" fi # @@ -495,6 +576,7 @@ FORTRAN namelist file has not specified for this external LBC model (EXTRN_MDL_N " nml_fn="fort.41" + # UW takes input from stdin when no -i/--input-config flag is provided (cat << EOF $settings EOF @@ -559,7 +641,7 @@ located in the following directory: lbc_spec_fhrs=( "${EXTRN_MDL_FHRS[$i]}" ) fcst_hhh=$(( ${lbc_spec_fhrs} - ${EXTRN_MDL_LBCS_OFFSET_HRS} )) fcst_hhh_FV3LAM=$( printf "%03d" "$fcst_hhh" ) - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then cp -p gfs.bndy.nc ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc else mv gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 9a3d5da7fc..34b1675d8c 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -1,5 +1,86 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# This ex-script is responsible for creating orography files for the FV3 +# forecast. +# +# The output of this script is placed in a directory defined by OROG_DIR. +# +# More about the orog for the regional configuration of the FV3: +# +# a) Only the tile 7 orography file is created. +# +# b) This orography file contains a halo of the same width (NHW) +# as the grid file for tile 7 generated by the make_grid script. +# +# c) Filtered versions of the orography files are created with the +# same width (NHW) as the unfiltered orography file and the grid +# file. FV3 requires two filtered orography files, one with no +# halo cells and one with 4 halo cells. +# +# This script does the following: +# +# - Create the raw orography files by running the orog executable. +# - Run the orog_gsl executable if any of several GSL-developed +# physics suites is chosen by the user.
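(For example, the FV3_HRRR suite named in the comments this patch rewrites further below relies on those drag-suite statistics files; the exact list of qualifying suites is checked in the script body.)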
+# - Run the filter_topo executable on the raw orography files +# - Run the shave executable for the 0- and 4-cell halo orography +# files +# +# Run-time environment variables: +# +# DATA +# GLOBAL_VAR_DEFNS_FP +# REDIRECT_OUT_ERR +# +# Experiment variables +# +# user: +# EXECdir +# USHdir +# +# platform: +# FIXorg +# PRE_TASK_CMDS +# RUN_CMD_SERIAL +# +# workflow: +# CCPP_PHYS_SUITE +# CRES +# DOT_OR_USCORE +# FIXam +# FIXlam +# GRID_GEN_METHOD +# PREEXISTING_DIR_METHOD +# VERBOSE +# +# task_make_orog: +# KMP_AFFINITY_MAKE_OROG +# OMP_NUM_THREADS_MAKE_OROG +# OMP_STACKSIZE_MAKE_OROG +# OROG_DIR +# +# task_make_grid: +# GFDLgrid_NUM_CELLS +# GFDLgrid_STRETCH_FAC +# GFDLgrid_REFINE_RATIO +# +# constants: +# NH0 +# NH4 +# TILE_RGNL +# +# grid_params: +# NHW +# NX +# NY +# STRETCH_FAC +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +89,10 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_orog|task_make_grid" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow constants grid_params task_make_grid task_make_orog task_make_grid ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done + # #----------------------------------------------------------------------- # @@ -30,13 +114,7 @@ source_config_for_task "task_make_orog|task_make_grid" ${GLOBAL_VAR_DEFNS_FP} scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) scrfunc_fn=$( basename "${scrfunc_fp}" ) scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# + print_info_msg " ======================================================================== Entering script: \"${scrfunc_fn}\" @@ -54,17 +132,7 @@ This is the ex-script for the task that generates orography files. export KMP_AFFINITY=${KMP_AFFINITY_MAKE_OROG} export OMP_NUM_THREADS=${OMP_NUM_THREADS_MAKE_OROG} export OMP_STACKSIZE=${OMP_STACKSIZE_MAKE_OROG} -# -#----------------------------------------------------------------------- -# -# Load modules and set various computational parameters and directories. -# -# Note: -# These module loads should all be moved to modulefiles. This has been -# done for Hera but must still be done for other machines. -# -#----------------------------------------------------------------------- -# + eval ${PRE_TASK_CMDS} if [ -z "${RUN_CMD_SERIAL:-}" ] ; then @@ -103,9 +171,6 @@ mkdir -p "${shave_dir}" # #----------------------------------------------------------------------- # -# Set the name and path to the executable that generates the raw orography -# file and make sure that it exists. -# exec_fn="orog" exec_fp="$EXECdir/${exec_fn}" if [ ! -f "${exec_fp}" ]; then @@ -114,10 +179,7 @@ The executable (exec_fp) for generating the orography file does not exist: exec_fp = \"${exec_fp}\" Please ensure that you've built this executable." fi -# -# Create a temporary (work) directory in which to generate the raw orography -# file and change location to it. 
-# + DATA="${DATA:-${raw_dir}/tmp}" mkdir -p "${DATA}" cd "${DATA}" @@ -131,15 +193,7 @@ cp ${FIXorg}/gmted2010.30sec.int fort.235 # #----------------------------------------------------------------------- # -# The orography filtering code reads in from the grid mosaic file the -# the number of tiles, the name of the grid file for each tile, and the -# dimensions (nx and ny) of each tile. Next, set the name of the grid -# mosaic file and create a symlink to it in filter_dir. -# -# Note that in the namelist file for the orography filtering code (created -# later below), the mosaic file name is saved in a variable called -# "grid_file". It would have been better to call this "mosaic_file" -# instead so it doesn't get confused with the grid file for a given tile... +# Get the grid file info from the mosaic file # #----------------------------------------------------------------------- # @@ -152,21 +206,15 @@ grid_fp="${FIXlam}/${grid_fn}" # #----------------------------------------------------------------------- # -# Set input parameters for the orography generation executable and write -# them to a text file. +# Set input parameters for the orog executable in a formatted text file. +# The executable takes its parameters via the command line. # -# Note that it doesn't matter what lonb and latb are set to below because -# if we specify an input grid file to the executable read in (which is -# what we do below), then if lonb and latb are not set to the dimensions -# of the grid specified in that file (divided by 2 since the grid file -# specifies a "supergrid"), then lonb and latb effectively get reset to -# the dimensions specified in the grid file. +# Note: lonb and latb are placeholders in this case since the program +# uses the ones obtained from the grid file. # #----------------------------------------------------------------------- # mtnres=1 -#lonb=$res -#latb=$res lonb=0 latb=0 jcap=0 @@ -195,15 +243,13 @@ cat "${input_redirect_fn}" # Call the executable to generate the raw orography file corresponding # to tile 7 (the regional domain) only. # -# The following will create an orography file named +# The script moves the output file from its temporary directory to the +# OROG_DIR and names it: # -# oro.${CRES}.tile7.nc +# ${CRES}_raw_orog.tile7.halo${NHW}.nc # -# and will place it in OROG_DIR. Note that this file will include -# orography for a halo of width NHW cells around tile 7. The follow- -# ing will also create a work directory called tile7 under OROG_DIR. -# This work directory can be removed after the orography file has been -# created (it is currently not deleted). +# Note that this file will include orography for a halo of width NHW +# cells around tile 7. # #----------------------------------------------------------------------- # @@ -225,9 +271,7 @@ cd - # #----------------------------------------------------------------------- # -# Move the raw orography file from the temporary directory to raw_dir. -# In the process, rename it such that its name includes CRES and the halo -# width. +# Move the raw orography file and rename it. # #----------------------------------------------------------------------- # @@ -240,9 +284,9 @@ mv "${raw_orog_fp_orig}" "${raw_orog_fp}" # #----------------------------------------------------------------------- # -# Call the code to generate the two orography statistics files (large- -# and small-scale) needed for the drag suite in the FV3_HRRR physics -# suite. 
+# Call the orog_gsl executable to generate the two orography statistics +# files (large- and small-scale) needed for the drag suite in certain +# GSL physics suites. # #----------------------------------------------------------------------- # @@ -321,14 +365,14 @@ fi # resolution of res_regional. These interpolated/extrapolated values are # then used to perform the orography filtering. # -# The above approach works for a GFDLgrid type of grid. To handle ESGgrid -# type grids, we set res in the namelist to the orography filtering code -# the equivalent global uniform cubed-sphere resolution of the regional -# grid, we set stretch_fac to 1 (since the equivalent resolution assumes -# a uniform global grid), and we set refine_ratio to 1. This will cause -# res_regional above to be set to the equivalent global uniform cubed- -# sphere resolution, so the filtering parameter values will be interpolated/ -# extrapolated to that resolution value. +# To handle ESGgrid type grids, we set res in the namelist for the +# orography filtering code to the equivalent global uniform cubed-sphere +# resolution of the regional grid, we set stretch_fac to 1 (since the +# equivalent resolution assumes a uniform global grid), and we set +# refine_ratio to 1. This will cause res_regional above to be set to +# the equivalent global uniform cubed-sphere resolution, so the +# filtering parameter values will be interpolated/extrapolated to that +# resolution value. # #----------------------------------------------------------------------- # @@ -346,13 +390,11 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # Really depends on what EMC wants to do. res="${GFDLgrid_NUM_CELLS}" -# stretch_fac="${GFDLgrid_STRETCH_FAC}" refine_ratio="${GFDLgrid_REFINE_RATIO}" elif [ "${GRID_GEN_METHOD}" = "ESGgrid" ]; then res="${CRES:1}" -# stretch_fac="${STRETCH_FAC}" refine_ratio="1" fi @@ -368,17 +410,12 @@ The executable (exec_fp) for filtering the raw orography does not exist: Please ensure that you've built this executable." fi # -# The orography filtering executable replaces the contents of the given -# raw orography file with a file containing the filtered orography. The -# name of the input raw orography file is in effect specified by the -# namelist variable topo_file; the orography filtering code assumes that -# this name is constructed by taking the value of topo_file and appending -# to it the string ".tile${N}.nc", where N is the tile number (which for -# a regional grid, is always 7). (Note that topo_file may start with a -# a path to the orography file that the filtering code will read in and -# replace.) Thus, we now copy the raw orography file (whose full path is -# specified by raw_orog_fp) to filter_dir and in the process rename it -# such that its new name: +# The filter_topo program overwrites its input file with filtered +# output, which is specified by topo_file in the namelist, but with a +# suffix ".tile7.nc" for the regional configuration. To avoid +# overwriting the output of the orog program, copy its output file to +# the filter_topo working directory and rename it.
Here, the name is +# chosen such that it: # # (1) indicates that it contains filtered orography data (because that # is what it will contain once the orography filtering executable @@ -392,21 +429,20 @@ filtered_orog_fp_prefix="${filter_dir}/${filtered_orog_fn_prefix}" filtered_orog_fp="${filtered_orog_fp_prefix}.${fn_suffix_without_halo}" cp "${raw_orog_fp}" "${filtered_orog_fp}" # -# The orography filtering executable looks for the grid file specified -# in the grid mosaic file (more specifically, specified by the gridfiles -# variable in the mosaic file) in the directory in which the executable -# is running. Recall that above, we already extracted the name of the -# grid file from the mosaic file and saved it in the variable grid_fn, -# and we saved the full path to this grid file in the variable grid_fp. -# Thus, we now create a symlink in the filter_dir directory (where the -# filtering executable will run) with the same name as the grid file and -# point it to the actual grid file specified by grid_fp. +# The filter_topo program looks for the grid file specified +# in the mosaic file (more specifically, specified by the gridfiles +# variable in the mosaic file) in its own run directory. Make a symlink +# to it. # create_symlink_to_file ${grid_fp} ${filter_dir}/${grid_fn} TRUE # # Create the namelist file (in the filter_dir directory) that the orography # filtering executable will read in. # +# Note that in the namelist file for the orography filtering code (created +# later below), the mosaic file name is saved in a variable called +# "grid_file". It would have been better to call this "mosaic_file" +# instead so it doesn't get confused with the grid file for a given tile. cat > "${filter_dir}/input.nml" < "${filter_dir}/input.nml" < ${nml_fn} + > ${ascii_fn} PREP_STEP -eval ${RUN_CMD_SERIAL} ${exec_fp} < ${nml_fn} ${REDIRECT_OUT_ERR} || \ +eval ${RUN_CMD_SERIAL} ${exec_fp} < ${ascii_fn} ${REDIRECT_OUT_ERR} || \ print_err_msg_exit "\ Call to executable (exec_fp) to generate a (filtered) orography file with a ${NH0}-cell-wide halo from the orography file with a {NHW}-cell-wide halo returned with nonzero exit code: exec_fp = \"${exec_fp}\" -The namelist file (nml_fn) used in this call is in directory shave_dir: - nml_fn = \"${nml_fn}\" +The config file (ascii_fn) used in this call is in directory shave_dir: + ascii_fn = \"${ascii_fn}\" shave_dir = \"${shave_dir}\"" POST_STEP mv ${shaved_fp} ${OROG_DIR} # -# Create an input namelist file for the shave executable to generate an +# Create an input config file for the shave executable to generate an # orography file with a 4-cell-wide halo from the one with a wide halo. # Then call the shave executable. Finally, move the resultant file to # the OROG_DIR directory. @@ -524,21 +554,21 @@ print_info_msg "$VERBOSE" " \"Shaving\" filtered orography file with a ${NHW}-cell-wide halo to obtain a filtered orography file with a ${NH4}-cell-wide halo..." 
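For concreteness, the one-line shave "config" file written by the printf calls in these shave steps carries five whitespace-separated fields, NX, NY, target halo width, input path, output path; with illustrative values it would read:

    219 131 4 "/path/to/oro.halo6.nc" "/path/to/oro.halo4.nc"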
-nml_fn="input.shave.orog.halo${NH4}" +ascii_fn="input.shave.orog.halo${NH4}" shaved_fp="${shave_dir}/${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo${NH4}.nc" printf "%s %s %s %s %s\n" \ $NX $NY ${NH4} \"${unshaved_fp}\" \"${shaved_fp}\" \ - > ${nml_fn} + > ${ascii_fn} PREP_STEP -eval ${RUN_CMD_SERIAL} ${exec_fp} < ${nml_fn} ${REDIRECT_OUT_ERR} || \ +eval ${RUN_CMD_SERIAL} ${exec_fp} < ${ascii_fn} ${REDIRECT_OUT_ERR} || \ print_err_msg_exit "\ Call to executable (exec_fp) to generate a (filtered) orography file with a ${NH4}-cell-wide halo from the orography file with a {NHW}-cell-wide halo returned with nonzero exit code: exec_fp = \"${exec_fp}\" -The namelist file (nml_fn) used in this call is in directory shave_dir: - nml_fn = \"${nml_fn}\" +The namelist file (ascii_fn) used in this call is in directory shave_dir: + ascii_fn = \"${ascii_fn}\" shave_dir = \"${shave_dir}\"" POST_STEP mv "${shaved_fp}" "${OROG_DIR}" @@ -549,8 +579,8 @@ cd - # #----------------------------------------------------------------------- # -# Add link in ORIG_DIR directory to the orography file with a 4-cell-wide -# halo such that the link name do not contain the halo width. These links +# Add link in OROG_DIR directory to the orography file with a 4-cell-wide +# halo such that the link name does not contain the halo width. These links # are needed by the make_sfc_climo task. # # NOTE: It would be nice to modify the sfc_climo_gen_code to read in @@ -563,13 +593,7 @@ python3 $USHdir/link_fix.py \ --file-group "orog" || \ print_err_msg_exit "\ Call to function to create links to orography files failed." -# -#----------------------------------------------------------------------- -# -# Print message indicating successful completion of script. -# -#----------------------------------------------------------------------- -# + print_info_msg " ======================================================================== Orography files with various halo widths generated successfully!!! diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index c4ee8f25b1..a916228b1f 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -1,5 +1,52 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# This ex-script generates surface climatology files needed to run FV3 +# forecasts. +# +# The script runs the sfc_climo_gen UFS Utils program, and links the +# output to the SFC_CLIMO_GEN directory +# +# Run-time environment variables: +# +# DATA +# GLOBAL_VAR_DEFNS_FP +# REDIRECT_OUT_ERR +# +# Experiment variables +# +# user: +# EXECdir +# USHdir +# +# platform: +# FIXsfc +# PRE_TASK_CMDS +# RUN_CMD_UTILS +# +# workflow: +# CRES +# DOT_OR_USCORE +# FIXlam +# VERBOSE +# +# task_make_sfc_climo: +# KMP_AFFINITY_MAKE_SFC_CLIMO +# OMP_NUM_THREADS_MAKE_SFC_CLIMO +# OMP_STACKSIZE_MAKE_SFC_CLIMO +# SFC_CLIMO_DIR +# +# constants: +# GTYPE +# NH0 +# NH4 +# TILE_RGNL +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +55,9 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_make_sfc_climo" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow constants task_make_sfc_climo ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index f769d4e225..0241dbd728 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -1,5 +1,113 @@ #!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# This ex-script is responsible for running the FV3 regional forecast. +# +# Run-time environment variables: +# +# CDATE +# COMIN +# COMOUT +# COMROOT +# DATA +# DBNROOT +# GLOBAL_VAR_DEFNS_FP +# INPUT_DATA +# NET +# PDY +# REDIRECT_OUT_ERR +# RUN +# SENDDBN +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# PRE_TASK_CMDS +# RUN_CMD_FCST +# +# workflow: +# CCPP_PHYS_DIR +# CCPP_PHYS_SUITE +# COLDSTART +# CRES +# DATA_TABLE_FN +# DATA_TABLE_FP +# DATE_FIRST_CYCL +# DOT_OR_USCORE +# EXPTDIR +# FCST_LEN_CYCL +# FCST_LEN_HRS +# FIELD_DICT_FP +# FIELD_DICT_FN +# FIELD_TABLE_FN +# FIELD_TABLE_FP +# FIXam +# FIXclim +# FIXlam +# FV3_NML_FN +# FV3_NML_FP +# FV3_NML_STOCH_FP +# INCR_CYCL_FREQ +# PREDEF_GRID_NAME +# SYMLINK_FIX_FILES +# VERBOSE +# +# task_get_extrn_lbcs: +# LBC_SPEC_INTVL_HRS +# +# task_run_fcst: +# DO_FCST_RESTART +# DT_ATMOS +# FV3_EXEC_FP +# KMP_AFFINITY_RUN_FCST +# OMP_NUM_THREADS_RUN_FCST +# OMP_STACKSIZE_RUN_FCST +# PRINT_ESMF +# RESTART_INTERVAL +# USE_MERRA_CLIMO +# WRITE_DOPOST +# +# task_run_post: +# CUSTOM_POST_CONFIG_FP +# DT_SUBHOURLY_POST_MNTS +# POST_OUTPUT_DOMAIN_NAME +# SUB_HOURLY_POST +# USE_CUSTOM_POST_CONFIG_FILE +# +# global: +# DO_ENSEMBLE +# DO_LSM_SPP +# DO_SHUM +# DO_SKEB +# DO_SPP +# DO_SPPT +# +# cpl_aqm_parm: +# AQM_RC_PRODUCT_FN +# CPL_AQM +# +# constants: +# NH0 +# NH3 +# NH4 +# TILE_RGNL +# +# fixed_files: +# CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +116,11 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_run_fcst|task_run_post|task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm constants fixed_files \ + task_get_extrn_lbcs task_run_fcst task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done + # #----------------------------------------------------------------------- # @@ -57,7 +169,7 @@ export OMP_NUM_THREADS=${OMP_NUM_THREADS_RUN_FCST} export OMP_STACKSIZE=${OMP_STACKSIZE_RUN_FCST} export MPI_TYPE_DEPTH=20 export ESMF_RUNTIME_COMPLIANCECHECK=OFF:depth=4 -if [ "${PRINT_ESMF}" = "TRUE" ]; then +if [ $(boolify "${PRINT_ESMF}") = "TRUE" ]; then export ESMF_RUNTIME_PROFILE=ON export ESMF_RUNTIME_PROFILE_OUTPUT="SUMMARY" fi @@ -227,7 +339,7 @@ cd ${DATA}/INPUT # relative_link_flag="FALSE" -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then COMIN="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" @@ -307,7 +419,7 @@ static) files in the FIXam directory: # isn't really an advantage to using relative symlinks, so we use symlinks # with absolute paths. # -if [ "${SYMLINK_FIX_FILES}" == "FALSE" ]; then +if [ $(boolify "${SYMLINK_FIX_FILES}") = "FALSE" ]; then relative_link_flag="TRUE" else relative_link_flag="FALSE" @@ -336,7 +448,7 @@ done # #----------------------------------------------------------------------- # -if [ "${USE_MERRA_CLIMO}" = "TRUE" ]; then +if [ $(boolify "${USE_MERRA_CLIMO}") = "TRUE" ]; then for f_nm_path in ${FIXclim}/*; do f_nm=$( basename "${f_nm_path}" ) pre_f="${f_nm%%.*}" @@ -397,16 +509,16 @@ create_symlink_to_file ${FIELD_TABLE_FP} ${DATA}/${FIELD_TABLE_FN} ${relative_li create_symlink_to_file ${FIELD_DICT_FP} ${DATA}/${FIELD_DICT_FN} ${relative_link_flag} -if [ ${WRITE_DOPOST} = "TRUE" ]; then +if [ $(boolify ${WRITE_DOPOST}) = "TRUE" ]; then cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat - if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then + if [ $(boolify ${USE_CUSTOM_POST_CONFIG_FILE}) = "TRUE" ]; then post_config_fp="${CUSTOM_POST_CONFIG_FP}" print_info_msg " ==================================================================== CUSTOM_POST_CONFIG_FP = \"${CUSTOM_POST_CONFIG_FP}\" ====================================================================" else - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then post_config_fp="${PARMdir}/upp/postxconfig-NT-AQM.txt" else post_config_fp="${PARMdir}/upp/postxconfig-NT-fv3lam.txt" @@ -420,7 +532,7 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then cp ${post_config_fp} ./postxconfig-NT.txt cp ${PARMdir}/upp/params_grib2_tbl_new . # Set itag for inline-post: - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then post_itag_add="aqf_on=.true.," else post_itag_add="" @@ -455,11 +567,14 @@ cp ${CCPP_PHYS_DIR}/noahmptable.tbl . 
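A remark on the operator normalization in the surrounding hunks: inside POSIX test brackets the portable string comparison is a single =, as in [ "$STOCH" = "TRUE" ]; == is a bash extension, so rewriting occurrences such as [ "${SYMLINK_FIX_FILES}" == "FALSE" ] to use = keeps these scripts portable and stylistically consistent.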
#----------------------------------------------------------------------- # STOCH="FALSE" -if ([ "${DO_SPP}" = "TRUE" ] || [ "${DO_SPPT}" = "TRUE" ] || [ "${DO_SHUM}" = "TRUE" ] || \ - [ "${DO_SKEB}" = "TRUE" ] || [ "${DO_LSM_SPP}" = "TRUE" ]); then +if ([ $(boolify "${DO_SPP}") = "TRUE" ] || \ + [ $(boolify "${DO_SPPT}") = "TRUE" ] || \ + [ $(boolify "${DO_SHUM}") = "TRUE" ] || \ + [ $(boolify "${DO_SKEB}") = "TRUE" ] || \ + [ $(boolify "${DO_LSM_SPP}") = "TRUE" ]); then STOCH="TRUE" fi -if [ "${STOCH}" == "TRUE" ]; then +if [ "${STOCH}" = "TRUE" ]; then cp ${FV3_NML_STOCH_FP} ${DATA}/${FV3_NML_FN} else ln -sf ${FV3_NML_FP} ${DATA}/${FV3_NML_FN} @@ -472,7 +587,7 @@ fi # #----------------------------------------------------------------------- # -if ([ "$STOCH" == "TRUE" ] && [ "${DO_ENSEMBLE}" = "TRUE" ]); then +if ([ "$STOCH" == "TRUE" ] && [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]); then python3 $USHdir/set_fv3nml_ens_stoch_seeds.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ --cdate "$CDATE" || print_err_msg_exit "\ @@ -489,7 +604,7 @@ fi # #----------------------------------------------------------------------- # -if [ "${CPL_AQM}" = "TRUE" ] && [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ] && [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then python3 $USHdir/update_input_nml.py \ --namelist "${DATA}/${FV3_NML_FN}" \ --aqm_na_13km || print_err_msg_exit "\ @@ -507,10 +622,10 @@ fi #----------------------------------------------------------------------- # flag_fcst_restart="FALSE" -if [ "${DO_FCST_RESTART}" = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then +if [ $(boolify "${DO_FCST_RESTART}") = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then cp input.nml input.nml_orig cp model_configure model_configure_orig - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then cp aqm.rc aqm.rc_orig fi relative_link_flag="FALSE" @@ -574,8 +689,10 @@ fi # #----------------------------------------------------------------------- # -if [ "${CPL_AQM}" = "TRUE" ]; then - if [ "${COLDSTART}" = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ] && [ "${flag_fcst_restart}" = "FALSE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then + if [ $(boolify "${COLDSTART}") = "TRUE" ] && \ + [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ] && \ + [ $(boolify "${flag_fcst_restart}") = "FALSE" ]; then init_concentrations="true" else init_concentrations="false" @@ -666,7 +783,7 @@ fi # #----------------------------------------------------------------------- # -if [ "${RUN_ENVIR}" = "nco" ] && [ "${CPL_AQM}" = "TRUE" ]; then +if [ "${RUN_ENVIR}" = "nco" ] && [ $(boolify "${CPL_AQM}") = "TRUE" ]; then # create an intermediate symlink to RESTART ln -sf "${DATA}/RESTART" "${COMIN}/RESTART" fi @@ -725,7 +842,7 @@ POST_STEP # #----------------------------------------------------------------------- # -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then if [ "${RUN_ENVIR}" = "nco" ]; then if [ -d "${COMIN}/RESTART" ] && [ "$(ls -A ${DATA}/RESTART)" ]; then rm -rf "${COMIN}/RESTART" @@ -758,8 +875,8 @@ fi # #----------------------------------------------------------------------- # -if [ ${WRITE_DOPOST} = "TRUE" ]; then - +if [ $(boolify ${WRITE_DOPOST}) = "TRUE" ]; then + yyyymmdd=${PDY} hh=${cyc} fmn="00" @@ -785,7 +902,7 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then post_fn_suffix="GrbF${fhr_d}" post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.grib2" - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ 
$(boolify "${CPL_AQM}") = "TRUE" ]; then fids=( "cmaq" ) else fids=( "prslev" "natlev" ) @@ -800,15 +917,15 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then if [ $RUN_ENVIR != "nco" ]; then basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M ) symlink_suffix="_${basetime}f${fhr}${post_mn}" - create_symlink_to_file ${post_renamed_fn} ${FID}${symlink_suffix} TRUE + create_symlink_to_file ${post_renamed_fn} ${FID}${symlink_suffix} TRUE fi # DBN alert - if [ $SENDDBN = "TRUE" ]; then + if [ "$SENDDBN" = "TRUE" ]; then $DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn} fi done - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then mv ${DATA}/dynf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.nc mv ${DATA}/phyf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.nc fi diff --git a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh index 93caeaa7f2..05503bb963 100755 --- a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh +++ b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post task_run_vx_ensgrid ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh index 4f871e6e1b..03c6093943 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_gridstat|task_run_vx_pointstat|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post task_run_vx_gridstat task_run_vx_pointstat ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -118,7 +122,7 @@ set_vx_params \ #----------------------------------------------------------------------- # i="0" -if [ "${DO_ENSEMBLE}" = "TRUE" ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then i=$( bc -l <<< "${ENSMEM_INDX}-1" ) fi time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" ) @@ -151,7 +155,7 @@ else # or, better, just remove this variale and code "/${ensmem_name}" where # slash_ensmem_subdir_or_null currently appears below. # - if [ "${DO_ENSEMBLE}" = "TRUE" ]; then + if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then slash_ensmem_subdir_or_null="/${ensmem_name}" else slash_ensmem_subdir_or_null="" diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh index 6e4a4ff33f..12a54dc21b 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid_mean|task_run_vx_enspoint_mean|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post task_run_vx_ensgrid_mean task_run_vx_enspoint_mean ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh index 924d321ec3..8fd4a59dfe 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid_prob|task_run_vx_enspoint_prob|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_vx_ensgrid_prob task_run_vx_enspoint_prob task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh index 985cd33c7f..5281021f01 100755 --- a/scripts/exregional_run_met_pb2nc_obs.sh +++ b/scripts/exregional_run_met_pb2nc_obs.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pb2nc_obs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_met_pb2nc_obs ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh index 6e64d102e6..ce9e78ab17 100755 --- a/scripts/exregional_run_met_pcpcombine.sh +++ b/scripts/exregional_run_met_pcpcombine.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pcpcombine|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_met_pcpcombine task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -122,7 +126,7 @@ set_vx_params \ time_lag="0" if [ "${FCST_OR_OBS}" = "FCST" ]; then i="0" - if [ "${DO_ENSEMBLE}" = "TRUE" ]; then + if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then i=$( bc -l <<< "${ENSMEM_INDX}-1" ) fi time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" ) @@ -157,7 +161,7 @@ if [ "${FCST_OR_OBS}" = "FCST" ]; then # or, better, just remove this variale and code "/${ensmem_name}" where # slash_ensmem_subdir_or_null currently appears below. 
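(Worked example of the member indexing above, with hypothetical values: ENSMEM_INDX=2 and ENS_TIME_LAG_HRS=( 0 6 ) give i=1 and time_lag=6*3600=21600 seconds, and slash_ensmem_subdir_or_null would become something like "/mem002" when DO_ENSEMBLE is true.)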
# - if [ "${DO_ENSEMBLE}" = "TRUE" ]; then + if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then slash_ensmem_subdir_or_null="/${ensmem_name}" else slash_ensmem_subdir_or_null="" diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 1bf45bd965..3f0ca93df9 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -1,5 +1,62 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The ex-script that runs UPP. +# +# Run-time environment variables: +# +# CDATE +# COMOUT +# DATA_FHR +# DBNROOT +# ENSMEM_INDX +# GLOBAL_VAR_DEFNS_FP +# NET +# PDY +# REDIRECT_OUT_ERR +# SENDDBN +# +# Experiment variables +# +# user: +# EXECdir +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# PRE_TASK_CMDS +# RUN_CMD_POST +# +# workflow: +# VERBOSE +# +# task_run_fcst: +# DT_ATMOS +# +# task_run_post: +# CUSTOM_POST_CONFIG_FP +# KMP_AFFINITY_RUN_POST +# OMP_NUM_THREADS_RUN_POST +# OMP_STACKSIZE_RUN_POST +# NUMX +# POST_OUTPUT_DOMAIN_NAME +# SUB_HOURLY_POST +# USE_CUSTOM_POST_CONFIG_FILE +# +# global: +# CRTM_DIR +# USE_CRTM +# +# cpl_aqm_parm: +# CPL_AQM +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +65,10 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm \ + task_run_fcst task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -82,7 +142,7 @@ fi # rm -f fort.* cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat -if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then +if [ $(boolify ${USE_CUSTOM_POST_CONFIG_FILE}) = "TRUE" ]; then post_config_fp="${CUSTOM_POST_CONFIG_FP}" print_info_msg " ==================================================================== @@ -92,7 +152,7 @@ to the temporary work directory (DATA_FHR): DATA_FHR = \"${DATA_FHR}\" ====================================================================" else - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then post_config_fp="${PARMdir}/upp/postxconfig-NT-AQM.txt" else post_config_fp="${PARMdir}/upp/postxconfig-NT-fv3lam.txt" @@ -107,7 +167,7 @@ temporary work directory (DATA_FHR): fi cp ${post_config_fp} ./postxconfig-NT.txt cp ${PARMdir}/upp/params_grib2_tbl_new . -if [ ${USE_CRTM} = "TRUE" ]; then +if [ $(boolify ${USE_CRTM}) = "TRUE" ]; then cp ${CRTM_DIR}/Nalli.IRwater.EmisCoeff.bin ./ cp ${CRTM_DIR}/FAST*.bin ./ cp ${CRTM_DIR}/NPOESS.IRland.EmisCoeff.bin ./ @@ -155,7 +215,7 @@ hh=${cyc} # must be set to a null string. # mnts_secs_str="" -if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then +if [ $(boolify "${SUB_HOURLY_POST}") = "TRUE" ]; then if [ ${fhr}${fmn} = "00000" ]; then mnts_secs_str=":"$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${DT_ATMOS} seconds" "+%M:%S" ) else @@ -185,7 +245,7 @@ post_mn=${post_time:10:2} # # Create the input namelist file to the post-processor executable. 
# -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then post_itag_add="aqf_on=.true.," else post_itag_add="" @@ -273,7 +333,7 @@ post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.gri cd "${COMOUT}" basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M ) symlink_suffix="${dot_ensmem}.${basetime}f${fhr}${post_mn}" -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then fids=( "cmaq" ) else fids=( "prslev" "natlev" ) @@ -287,7 +347,7 @@ for fid in "${fids[@]}"; do create_symlink_to_file ${post_renamed_fn} ${FID}${symlink_suffix} TRUE fi # DBN alert - if [ $SENDDBN = "TRUE" ]; then + if [ "$SENDDBN" = "TRUE" ]; then $DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn} fi done diff --git a/scripts/exregional_run_prdgen.sh b/scripts/exregional_run_prdgen.sh index 5d1bfbf447..5baa779821 100755 --- a/scripts/exregional_run_prdgen.sh +++ b/scripts/exregional_run_prdgen.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_prdgen|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post task_run_prdgen ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -175,7 +179,7 @@ done # Remap to additional output grids if requested #----------------------------------------------- -if [ ${DO_PARALLEL_PRDGEN} == "TRUE" ]; then +if [ $(boolify ${DO_PARALLEL_PRDGEN}) = "TRUE" ]; then # # parallel run wgrib2 for product generation # diff --git a/scripts/exsrw_aqm_ics.sh b/scripts/exsrw_aqm_ics.sh index efd833b092..4fd040e597 100755 --- a/scripts/exsrw_aqm_ics.sh +++ b/scripts/exsrw_aqm_ics.sh @@ -8,7 +8,10 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_aqm_lbcs.sh b/scripts/exsrw_aqm_lbcs.sh index 93dc119ec2..7b3058ef34 100755 --- a/scripts/exsrw_aqm_lbcs.sh +++ b/scripts/exsrw_aqm_lbcs.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_get_extrn_lbcs task_make_lbcs task_make_orog ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -100,7 +104,7 @@ for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do cp -p "${DATA_SHARE}/${aqm_lbcs_fn}" ${DATA} done -if [ "${DO_AQM_CHEM_LBCS}" = "TRUE" ]; then +if [ $(boolify "${DO_AQM_CHEM_LBCS}") = "TRUE" ]; then ext_lbcs_file="${AQM_LBCS_FILES}" chem_lbcs_fn=${ext_lbcs_file///${MM}} chem_lbcs_fp="${FIXaqm}/chemlbc/${chem_lbcs_fn}" @@ -141,7 +145,7 @@ fi # #----------------------------------------------------------------------- # -if [ "${DO_AQM_GEFS_LBCS}" = "TRUE" ]; then +if [ $(boolify "${DO_AQM_GEFS_LBCS}") = "TRUE" ]; then AQM_GEFS_FILE_CYC=${AQM_GEFS_FILE_CYC:-"${HH}"} AQM_GEFS_FILE_CYC=$( printf "%02d" "${AQM_GEFS_FILE_CYC}" ) @@ -153,7 +157,7 @@ if [ "${DO_AQM_GEFS_LBCS}" = "TRUE" ]; then fi aqm_mofile_fn="${AQM_GEFS_FILE_PREFIX}.t${AQM_GEFS_FILE_CYC}z.atmf" - if [ "${DO_REAL_TIME}" = "TRUE" ]; then + if [ $(boolify "${DO_REAL_TIME}") = "TRUE" ]; then aqm_mofile_fp="${COMINgefs}/gefs.${YYYYMMDD}/${AQM_GEFS_FILE_CYC}/chem/sfcsig/${aqm_mofile_fn}" else aqm_mofile_fp="${COMINgefs}/${YYYYMMDD}/${AQM_GEFS_FILE_CYC}/${aqm_mofile_fn}" diff --git a/scripts/exsrw_bias_correction_o3.sh b/scripts/exsrw_bias_correction_o3.sh index 1ef4012528..343e7e6f2b 100755 --- a/scripts/exsrw_bias_correction_o3.sh +++ b/scripts/exsrw_bias_correction_o3.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_bias_correction_o3 ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -199,7 +203,7 @@ POST_STEP cp ${DATA}/out/ozone/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy} -if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then +if [ $(boolify "${DO_AQM_SAVE_AIRNOW_HIST}") = "TRUE" ]; then mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} cp ${DATA}/out/ozone/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} diff --git a/scripts/exsrw_bias_correction_pm25.sh b/scripts/exsrw_bias_correction_pm25.sh index ae1a2d6f65..70cf512589 100755 --- a/scripts/exsrw_bias_correction_pm25.sh +++ b/scripts/exsrw_bias_correction_pm25.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_bias_correction_pm25 ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -198,7 +202,7 @@ POST_STEP cp ${DATA}/out/pm25/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy} -if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then +if [ $(boolify "${DO_AQM_SAVE_AIRNOW_HIST}") = "TRUE" ]; then mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} cp ${DATA}/out/pm25/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} fi diff --git a/scripts/exsrw_fire_emission.sh b/scripts/exsrw_fire_emission.sh index cb44c99d8d..3ae78422f5 100755 --- a/scripts/exsrw_fire_emission.sh +++ b/scripts/exsrw_fire_emission.sh @@ -8,7 +8,10 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_nexus_emission.sh b/scripts/exsrw_nexus_emission.sh index a5769a6483..0fa8c48754 100755 --- a/scripts/exsrw_nexus_emission.sh +++ b/scripts/exsrw_nexus_emission.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_nexus_emission ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_nexus_gfs_sfc.sh b/scripts/exsrw_nexus_gfs_sfc.sh index 103842d46f..cadc27b89c 100755 --- a/scripts/exsrw_nexus_gfs_sfc.sh +++ b/scripts/exsrw_nexus_gfs_sfc.sh @@ -8,7 +8,10 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -73,7 +76,7 @@ fcst_len_hrs_offset=$(( FCST_LEN_HRS + TIME_OFFSET_HRS )) GFS_SFC_TAR_DIR="${NEXUS_GFS_SFC_ARCHV_DIR}/rh${YYYY}/${YYYYMM}/${YYYYMMDD}" GFS_SFC_TAR_SUB_DIR="gfs.${YYYYMMDD}/${HH}/atmos" -if [ "${DO_REAL_TIME}" = "TRUE" ]; then +if [ $(boolify "${DO_REAL_TIME}") = "TRUE" ]; then GFS_SFC_LOCAL_DIR="${COMINgfs}/${GFS_SFC_TAR_SUB_DIR}" else GFS_SFC_LOCAL_DIR="${NEXUS_GFS_SFC_DIR}/${GFS_SFC_TAR_SUB_DIR}" diff --git a/scripts/exsrw_nexus_post_split.sh b/scripts/exsrw_nexus_post_split.sh index 517893b5e5..151e0a2ea5 100755 --- a/scripts/exsrw_nexus_post_split.sh +++ b/scripts/exsrw_nexus_post_split.sh @@ -8,7 +8,10 @@ #----------------------------------------------------------------------- # . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_point_source.sh b/scripts/exsrw_point_source.sh index 7acbc946f7..4cd693506c 100755 --- a/scripts/exsrw_point_source.sh +++ b/scripts/exsrw_point_source.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_point_source task_run_fcst ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_post_stat_o3.sh b/scripts/exsrw_post_stat_o3.sh index 6fa1db7f8f..dfcdd24ffa 100755 --- a/scripts/exsrw_post_stat_o3.sh +++ b/scripts/exsrw_post_stat_o3.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_post_stat_pm25.sh b/scripts/exsrw_post_stat_pm25.sh index ea7c1717c3..bdbf1fcbc5 100755 --- a/scripts/exsrw_post_stat_pm25.sh +++ b/scripts/exsrw_post_stat_pm25.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . ${USHsrw}/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_pre_post_stat.sh b/scripts/exsrw_pre_post_stat.sh index dfb4c2cf9e..f6ec6a9a7d 100755 --- a/scripts/exsrw_pre_post_stat.sh +++ b/scripts/exsrw_pre_post_stat.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . 
${USHsrw}/source_util_funcs.sh -source_config_for_task "task_pre_post|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/tests/WE2E/utils.py b/tests/WE2E/utils.py index fb96dab004..eb3c49fcba 100755 --- a/tests/WE2E/utils.py +++ b/tests/WE2E/utils.py @@ -21,7 +21,7 @@ cfg_to_yaml_str, flatten_dict, load_config_file, - load_shell_config + load_yaml_config ) REPORT_WIDTH = 100 @@ -154,13 +154,13 @@ def calculate_core_hours(expts_dict: dict) -> dict: for expt in expts_dict: # Read variable definitions file - vardefs_file = os.path.join(expts_dict[expt]["expt_dir"],"var_defns.sh") + vardefs_file = os.path.join(expts_dict[expt]["expt_dir"],"var_defns.yaml") if not os.path.isfile(vardefs_file): logging.warning(f"\nWARNING: For experiment {expt}, variable definitions file") logging.warning(f"{vardefs_file}\ndoes not exist!\n\nDropping experiment from summary") continue logging.debug(f'Reading variable definitions file {vardefs_file}') - vardefs = load_shell_config(vardefs_file) + vardefs = load_yaml_config(vardefs_file) vdf = flatten_dict(vardefs) cores_per_node = vdf["NCORES_PER_NODE"] for task in expts_dict[expt]: diff --git a/tests/test_python/test_retrieve_data.py b/tests/test_python/test_retrieve_data.py index 1d54e0904c..2c749c97ac 100644 --- a/tests/test_python/test_retrieve_data.py +++ b/tests/test_python/test_retrieve_data.py @@ -493,61 +493,3 @@ def test_ufs_lbcs_from_aws(self): # Testing that there is no failure retrieve_data.main(args) - - @unittest.skipIf(os.environ.get("CI") == "true", "Skipping HPSS tests") - def test_rap_obs_from_hpss(self): - - """Get RAP observations from hpss for a 06z time""" - - with tempfile.TemporaryDirectory(dir=self.path) as tmp_dir: - os.chdir(tmp_dir) - - # fmt: off - args = [ - '--file_set', 'obs', - '--config', self.config, - '--cycle_date', '2023032106', - '--data_stores', 'hpss', - '--data_type', 'RAP_obs', - '--output_path', tmp_dir, - '--debug', - ] - # fmt: on - - retrieve_data.main(args) - - # Verify files exist in temp dir - - path = os.path.join(tmp_dir, "*") - files_on_disk = glob.glob(path) - self.assertEqual(len(files_on_disk), 30) - - @unittest.skipIf(os.environ.get("CI") == "true", "Skipping HPSS tests") - def test_rap_e_obs_from_hpss(self): - - """Get RAP observations from hpss for a 12z time; - at 00z and 12z we expect to see additional files - with the 'rap_e' naming convention""" - - with tempfile.TemporaryDirectory(dir=self.path) as tmp_dir: - os.chdir(tmp_dir) - - # fmt: off - args = [ - '--file_set', 'obs', - '--config', self.config, - '--cycle_date', '2023032112', - '--data_stores', 'hpss', - '--data_type', 'RAP_obs', - '--output_path', tmp_dir, - '--debug', - ] - # fmt: on - - retrieve_data.main(args) - - # Verify files exist in temp dir - - path = os.path.join(tmp_dir, "*") - files_on_disk = glob.glob(path) - self.assertEqual(len(files_on_disk), 37) diff --git a/ush/bash_utils/check_var_valid_value.sh b/ush/bash_utils/check_var_valid_value.sh index 21288184db..5b942c1f73 100644 --- a/ush/bash_utils/check_var_valid_value.sh +++ b/ush/bash_utils/check_var_valid_value.sh @@ -96,7 +96,7 @@ where the arguments are defined as follows: var_value=${!var_name} valid_var_values_at="$valid_var_values_array_name[@]" - valid_var_values=("${!valid_var_values_at}") + 
valid_var_values=("${!valid_var_values_at:-}") if [ "$#" -eq 3 ]; then err_msg="$3" diff --git a/ush/bash_utils/create_symlink_to_file.sh b/ush/bash_utils/create_symlink_to_file.sh index c6a5213326..0cfcdc9fdf 100644 --- a/ush/bash_utils/create_symlink_to_file.sh +++ b/ush/bash_utils/create_symlink_to_file.sh @@ -30,6 +30,7 @@ fi target=$1 symlink=$2 relative=${3:-TRUE} +relative=$(boolify $relative) # #----------------------------------------------------------------------- # diff --git a/ush/bash_utils/print_msg.sh b/ush/bash_utils/print_msg.sh index 28a70d1431..8b032f9698 100644 --- a/ush/bash_utils/print_msg.sh +++ b/ush/bash_utils/print_msg.sh @@ -68,7 +68,7 @@ function print_info_msg() { elif [ "$#" -eq 2 ]; then - verbose="$1" + verbose=$(boolify "$1") info_msg="$2" # #----------------------------------------------------------------------- diff --git a/ush/bash_utils/source_config.sh b/ush/bash_utils/source_config.sh deleted file mode 100644 index df5a79a0df..0000000000 --- a/ush/bash_utils/source_config.sh +++ /dev/null @@ -1,53 +0,0 @@ -# -#----------------------------------------------------------------------- -# This file defines function that sources a config file (yaml/json etc) -# into the calling shell script -#----------------------------------------------------------------------- -# - -function config_to_str() { - $USHdir/config_utils.py -o $1 -c $2 "${@:3}" -} - -# -#----------------------------------------------------------------------- -# Define functions for different file formats -#----------------------------------------------------------------------- -# -function config_to_shell_str() { - config_to_str shell "$@" -} -function config_to_ini_str() { - config_to_str ini "$@" -} -function config_to_yaml_str() { - config_to_str yaml "$@" -} -function config_to_json_str() { - config_to_str json "$@" -} -function config_to_xml_str() { - config_to_str xml "$@" -} - -# -#----------------------------------------------------------------------- -# Source contents of a config file to shell script -#----------------------------------------------------------------------- -# -function source_config() { - - source <( config_to_shell_str "$@" ) - -} -# -#----------------------------------------------------------------------- -# Source partial contents of a config file to shell script. 
-# Only those variables needed by the task are sourced -#----------------------------------------------------------------------- -# -function source_config_for_task() { - - source <( config_to_shell_str "${@:2}" -k "(^(?!task_)|$1).*" ) - -} diff --git a/ush/bash_utils/source_yaml.sh b/ush/bash_utils/source_yaml.sh new file mode 100644 index 0000000000..669408416e --- /dev/null +++ b/ush/bash_utils/source_yaml.sh @@ -0,0 +1,36 @@ + + +function source_yaml () { + + local func_name="${FUNCNAME[0]}" + + if [ "$#" -lt 1 ] ; then + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: ${func_name} + Number of args specified: $# + +Usage: + + ${func_name} yaml_file [section] + + yaml_file: path to the YAML file to source + section: optional subsection of yaml +" + fi + local section + yaml_file=$1 + section=$2 + + while read -r line ; do + + + # A regex to match list representations + line=$(echo "$line" | sed -E "s/='\[(.*)\]'/=(\1)/") + line=${line//,/} + line=${line//\"/} + line=${line/None/} + source <( echo "${line}" ) + done < <(uw config realize -i "${yaml_file}" --output-format sh --key-path $section) +} diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index c9c0fc7cb8..90651c1b7f 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -468,7 +468,7 @@ workflow: # #----------------------------------------------------------------------- # - WORKFLOW_ID: !nowtimestamp '' + WORKFLOW_ID: "" # #----------------------------------------------------------------------- # @@ -718,13 +718,11 @@ workflow: # script creates and that defines the workflow for the experiment. # # GLOBAL_VAR_DEFNS_FN: - # Name of file (a shell script) containing the definitions of the primary - # experiment variables (parameters) defined in this default configuration - # script and in the user-specified configuration as well as secondary - # experiment variables generated by the experiment generation script. - # This file is sourced by many scripts (e.g. the J-job scripts corresponding - # to each workflow task) in order to make all the experiment variables - # available in those scripts. + # Name of the experiment configuration file. It contains the primary + # experiment variables defined in this default configuration script and in the + # user-specified configuration as well as secondary experiment variables + # generated by the experiment generation script. This file is the primary + # source of information used in the scripts at run time. 
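Since GLOBAL_VAR_DEFNS_FN is now a YAML file rather than a sourced shell script, the bash task scripts read it one section at a time. A minimal sketch of that consuming pattern (it mirrors the ex-scripts earlier in this patch; the exact section list varies by task, and task_run_post here is just one example):

. ${USHsrw}/source_util_funcs.sh
for sect in user platform workflow task_run_post ; do
  # source_yaml realizes the named section of the YAML file and evaluates it into the shell
  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
done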
# # ROCOTO_YAML_FN: # Name of the YAML file containing the YAML workflow definition from @@ -772,7 +770,7 @@ workflow: FCST_MODEL: "ufs-weather-model" WFLOW_XML_FN: "FV3LAM_wflow.xml" - GLOBAL_VAR_DEFNS_FN: "var_defns.sh" + GLOBAL_VAR_DEFNS_FN: "var_defns.yaml" ROCOTO_YAML_FN: "rocoto_defns.yaml" EXTRN_MDL_VAR_DEFNS_FN: "extrn_mdl_var_defns" WFLOW_LAUNCH_SCRIPT_FN: "launch_FV3LAM_wflow.sh" diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py index 739a4d9f18..c37ed05d29 100644 --- a/ush/create_aqm_rc_file.py +++ b/ush/create_aqm_rc_file.py @@ -13,7 +13,7 @@ cfg_to_yaml_str, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, print_info_msg, print_input_args, str_to_type, @@ -158,7 +158,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_aqm_rc_file( diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py index 975165dfe5..113953172d 100644 --- a/ush/create_diag_table_file.py +++ b/ush/create_diag_table_file.py @@ -14,7 +14,7 @@ cfg_to_yaml_str, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, print_info_msg, print_input_args, ) @@ -102,7 +102,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_diag_table_file(args.run_dir) diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index cd39087688..b8767f635a 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -13,7 +13,7 @@ cfg_to_yaml_str, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, lowercase, print_info_msg, print_input_args, @@ -296,7 +296,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_model_configure_file( diff --git a/ush/create_ufs_configure_file.py b/ush/create_ufs_configure_file.py index 9d4ea8afa4..3fd82f488b 100644 --- a/ush/create_ufs_configure_file.py +++ b/ush/create_ufs_configure_file.py @@ -15,7 +15,7 @@ cfg_to_yaml_str, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, print_info_msg, print_input_args, ) @@ -113,7 +113,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_ufs_configure_file( diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index ba0e9f3a2b..c671a69da8 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -11,12 +11,15 @@ import logging import os import sys +from stat import S_IXUSR +from string import Template from textwrap import dedent from uwtools.api.config import get_nml_config, get_yaml_config, realize from uwtools.api.template import render from python_utils import ( + list_to_str, log_info, import_vars, export_vars, @@ -24,7 +27,6 @@ ln_vrfy, mkdir_vrfy, mv_vrfy, - create_symlink_to_file, check_for_preexist_dir_file, cfg_to_yaml_str, find_pattern_in_str, @@ -137,9 +139,23 @@ def generate_FV3LAM_wflow( verbose=debug, ) - create_symlink_to_file( - 
wflow_launch_script_fp, os.path.join(exptdir, wflow_launch_script_fn), False - ) + with open(wflow_launch_script_fp, "r", encoding='utf-8') as launch_script_file: + launch_script_content = launch_script_file.read() + + # Stage an experiment-specific launch file in the experiment directory + template = Template(launch_script_content) + + # The script needs several variables from the workflow and user sections + template_variables = {**expt_config["user"], **expt_config["workflow"], + "valid_vals_BOOLEAN": list_to_str(expt_config["constants"]["valid_vals_BOOLEAN"])} + launch_content = template.safe_substitute(template_variables) + + launch_fp = os.path.join(exptdir, wflow_launch_script_fn) + with open(launch_fp, "w", encoding='utf-8') as expt_launch_fn: + expt_launch_fn.write(launch_content) + + os.chmod(launch_fp, os.stat(launch_fp).st_mode|S_IXUSR) + # # ----------------------------------------------------------------------- # @@ -639,7 +655,7 @@ def generate_FV3LAM_wflow( input_format="nml", output_file=FV3_NML_STOCH_FP, output_format="nml", - supplemental_configs=[settings], + update_config=get_nml_config(settings), ) # diff --git a/ush/job_preamble.sh b/ush/job_preamble.sh index 16b99393a2..ecfb94fb50 100644 --- a/ush/job_preamble.sh +++ b/ush/job_preamble.sh @@ -1,5 +1,7 @@ #!/bin/bash +set +u + # #----------------------------------------------------------------------- # @@ -67,13 +69,13 @@ export COMOUTwmo="${COMOUTwmo:-${COMOUT}/wmo}" # #----------------------------------------------------------------------- # -if [ ${subcyc} -ne 0 ]; then +if [ ${subcyc:-0} -ne 0 ]; then export cycle="t${cyc}${subcyc}z" else export cycle="t${cyc}z" fi -if [ "${RUN_ENVIR}" = "nco" ] && [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z $ENSMEM_INDX ]; then +if [ "${RUN_ENVIR}" = "nco" ] && [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z $ENSMEM_INDX ]; then export dot_ensmem=".mem${ENSMEM_INDX}" else export dot_ensmem= @@ -215,4 +217,3 @@ In directory: \"${scrfunc_dir}\" ========================================================================" } - diff --git a/ush/launch_FV3LAM_wflow.sh b/ush/launch_FV3LAM_wflow.sh old mode 100755 new mode 100644 index 92dd24aee6..7c26511f4f --- a/ush/launch_FV3LAM_wflow.sh +++ b/ush/launch_FV3LAM_wflow.sh @@ -34,43 +34,10 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -# Get the experiment directory. We assume that there is a symlink to -# this script in the experiment directory, and this script is called via -# that symlink. Thus, finding the directory in which the symlink is -# located will give us the experiment directory. We find this by first -# obtaining the directory portion (i.e. the portion without the name of -# this script) of the command that was used to called this script (i.e. -# "$0") and then use the "readlink -f" command to obtain the corresponding -# absolute path. 
This will work for all four of the following ways in -# which the symlink in the experiment directory pointing to this script -# may be called: -# -# 1) Call this script from the experiment directory: -# > cd /path/to/experiment/directory -# > launch_FV3LAM_wflow.sh -# -# 2) Call this script from the experiment directory but using "./" before -# the script name: -# > cd /path/to/experiment/directory -# > ./launch_FV3LAM_wflow.sh -# -# 3) Call this script from any directory using the absolute path to the -# symlink in the experiment directory: -# > /path/to/experiment/directory/launch_FV3LAM_wflow.sh -# -# 4) Call this script from a directory that is several levels up from the -# experiment directory (but not necessarily at the root directory): -# > cd /path/to -# > experiment/directory/launch_FV3LAM_wflow.sh -# -# Note that given just a file name, e.g. the name of this script without -# any path before it, the "dirname" command will return a ".", e.g. in -# bash, -# -# > exptdir=$( dirname "launch_FV3LAM_wflow.sh" ) -# > echo $exptdir -# -# will print out ".". +# This script will be configured for a specific experiment when +# generate_FV3LAM_wflow.py runs. That process fills in what is necessary so +# the configured script in the experiment directory will need no +# additional information at run time. # #----------------------------------------------------------------------- # @@ -94,7 +61,12 @@ fi # #----------------------------------------------------------------------- # -. $exptdir/var_defns.sh + +# These variables are assumed to exist in the global environment by the +# bash_utils, which is a Very Bad (TM) thing. +export USHdir=$USHdir +export valid_vals_BOOLEAN=${valid_vals_BOOLEAN} + . $USHdir/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -369,7 +341,7 @@ by expt_name has completed with the following workflow status (wflow_status): # Thus, there is no need to try to relaunch it. We also append a message # to the completion message above to indicate this. # - if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then + if [ $(boolify "${USE_CRON_TO_RELAUNCH}") = "TRUE" ]; then msg="${msg}\ Thus, there is no need to relaunch the workflow via a cron job. Removing diff --git a/ush/link_fix.py b/ush/link_fix.py index fdd9a65f28..f0d103d8ea 100755 --- a/ush/link_fix.py +++ b/ush/link_fix.py @@ -18,7 +18,7 @@ cd_vrfy, mkdir_vrfy, find_pattern_in_str, - load_shell_config, + load_yaml_config, ) @@ -403,7 +403,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) link_fix( verbose=cfg["workflow"]["VERBOSE"], file_group=args.file_group, diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index 89f3addf41..5ede278bfd 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -3,33 +3,43 @@ # #----------------------------------------------------------------------- # -# Source necessary files. +# This script loads the appropriate modules for a given task in an +# experiment. # -#----------------------------------------------------------------------- +# It requires the following global environment variables: # -. ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/source_util_funcs.sh +# GLOBAL_VAR_DEFNS_FP # -#----------------------------------------------------------------------- +# And uses these variables from the GLOBAL_VAR_DEFNS_FP file # -# Save current shell options (in a global array).
Then set new options -# for this script/function. +# platform: +# BUILD_MOD_FN +# RUN_VER_FN +# +# workflow: +# VERBOSE # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 + +# Get the location of this file -- it's the USHdir +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +USHdir=$( dirname "${scrfunc_fp}" ) +HOMEdir=$( dirname $USHdir ) + +source $USHdir/source_util_funcs.sh + # #----------------------------------------------------------------------- # -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). +# Save current shell options (in a global array). Then set new options +# for this script/function. # #----------------------------------------------------------------------- # -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) +{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 + # #----------------------------------------------------------------------- # @@ -37,7 +47,7 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -if [ "$#" -ne 2 ]; then +if [ "$#" -ne 3 ]; then print_err_msg_exit " Incorrect number of arguments specified: @@ -46,15 +56,17 @@ Incorrect number of arguments specified: Usage: - ${scrfunc_fn} task_name jjob_fp + ${scrfunc_fn} machine task_name jjob_fp where the arguments are defined as follows: + machine: The name of the supported platform + task_name: The name of the rocoto task for which this script will load modules and launch the J-job. - jjob_fp + jjob_fp: The full path to the J-job script corresponding to task_name. This script will launch this J-job using the \"exec\" command (which will first terminate this script and then launch the j-job; see man page of @@ -65,12 +77,13 @@ fi # #----------------------------------------------------------------------- # -# Get the task name and the name of the J-job script. +# Save arguments # #----------------------------------------------------------------------- # -task_name="$1" -jjob_fp="$2" +machine=$(echo_lowercase $1) +task_name="$2" +jjob_fp="$3" # #----------------------------------------------------------------------- # @@ -99,12 +112,38 @@ set -u #----------------------------------------------------------------------- # default_modules_dir="$HOMEdir/modulefiles" -machine=$(echo_lowercase $MACHINE) -if [ "${WORKFLOW_MANAGER}" != "ecflow" ]; then +test ! $(module is-loaded ecflow > /dev/null 2>&1) && ecflow_loaded=false + +if [ "$ecflow_loaded" = "false" ] ; then source "${HOMEdir}/etc/lmod-setup.sh" ${machine} fi module use "${default_modules_dir}" +# Load workflow environment + +if [ -f ${default_modules_dir}/python_srw.lua ] ; then + module load python_srw || print_err_msg_exit "\ + Loading SRW common python module failed. Expected python_srw.lua + in the modules directory here: + modules_dir = \"${default_modules_dir}\"" +fi + +# Modules that use conda and need an environment activated will set the +# SRW_ENV variable to the name of the environment to be activated. That +# must be done within the script, and not inside the module. Do that +# now. 
+if [ -n "${SRW_ENV:-}" ] ; then + set +u + conda deactivate + conda activate ${SRW_ENV} + set -u +fi + +# Source the necessary blocks of the experiment config YAML +for sect in platform workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done + if [ "${machine}" != "wcoss2" ]; then module load "${BUILD_MOD_FN}" || print_err_msg_exit "\ Loading of platform- and compiler-specific module file (BUILD_MOD_FN) @@ -116,26 +155,15 @@ fi # #----------------------------------------------------------------------- # -# Set the directory (modules_dir) in which the module files for the va- -# rious workflow tasks are located. Also, set the name of the module -# file for the specified task. -# -# A module file is a file whose first line is the "magic cookie" string -# '#%Module'. It is interpreted by the "module load ..." command. It -# sets environment variables (including prepending/appending to paths) -# and loads modules. -# -# The UFS SRW App repository contains module files for the -# workflow tasks in the template rocoto XML file for the FV3-LAM work- -# flow that need modules not loaded in the BUILD_MOD_FN above. +# Set the directory for the modulefiles included with SRW and the +# specific module for the requested task. # # The full path to a module file for a given task is # # $HOMEdir/modulefiles/$machine/${task_name}.local # -# where HOMEdir is the base directory of the workflow, machine is the -# name of the machine that we're running on (in lowercase), and task_- -# name is the name of the current task (an input to this script). +# where HOMEdir is the SRW clone, machine is the name of the platform +# being used, and task_name is the current task to run. # #----------------------------------------------------------------------- # @@ -154,10 +182,10 @@ Loading modules for task \"${task_name}\" ..." module use "${modules_dir}" || print_err_msg_exit "\ Call to \"module use\" command failed." -# source version file (run) only if it is specified in versions directory -VERSION_FILE="${HOMEdir}/versions/${RUN_VER_FN}" -if [ -f ${VERSION_FILE} ]; then - . ${VERSION_FILE} +# source version file only if it exists in the versions directory +version_file="${HOMEdir}/versions/${RUN_VER_FN}" +if [ -f ${version_file} ]; then + source ${version_file} fi # # Load the .local module file if available for the given task @@ -170,20 +198,11 @@ specified task (task_name) failed: task_name = \"${task_name}\" modulefile_local = \"${modulefile_local}\" modules_dir = \"${modules_dir}\"" -elif [ -f ${default_modules_dir}/python_srw.lua ] ; then - module load python_srw || print_err_msg_exit "\ - Loading SRW common python module failed. Expected python_srw.lua - in the modules directory here: - modules_dir = \"${default_modules_dir}\"" fi - module list -# Modules that use conda and need an environment activated will set the -# SRW_ENV variable to the name of the environment to be activated. That -# must be done within the script, and not inside the module. Do that -# now. - +# Reactivate the workflow environment to ensure the correct Python +# environment is available first in the environment. if [ -n "${SRW_ENV:-}" ] ; then set +u conda deactivate @@ -204,11 +223,7 @@ Launching J-job (jjob_fp) for task \"${task_name}\" ... 
jjob_fp = \"${jjob_fp}\" " -if [ "${WORKFLOW_MANAGER}" = "ecflow" ]; then - /bin/bash "${jjob_fp}" -else - exec "${jjob_fp}" -fi +source "${jjob_fp}" # #----------------------------------------------------------------------- diff --git a/ush/machine/hera.yaml b/ush/machine/hera.yaml index 4d836af317..80fbb8fc98 100644 --- a/ush/machine/hera.yaml +++ b/ush/machine/hera.yaml @@ -20,8 +20,8 @@ platform: RUN_CMD_UTILS: srun --export=ALL RUN_CMD_NEXUS: srun -n ${nprocs} --export=ALL RUN_CMD_AQMLBC: srun --export=ALL -n ${numts} - SCHED_NATIVE_CMD: --export=NONE - SCHED_NATIVE_CMD_HPSS: -n 1 --export=NONE + SCHED_NATIVE_CMD: "--export=NONE" + SCHED_NATIVE_CMD_HPSS: "-n 1 --export=NONE" PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' TEST_EXTRN_MDL_SOURCE_BASEDIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/input_model_data TEST_AQM_INPUT_BASEDIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/aqm_data diff --git a/ush/set_fv3nml_ens_stoch_seeds.py b/ush/set_fv3nml_ens_stoch_seeds.py index 3459fa8707..0b9b186210 100644 --- a/ush/set_fv3nml_ens_stoch_seeds.py +++ b/ush/set_fv3nml_ens_stoch_seeds.py @@ -10,12 +10,12 @@ import sys from textwrap import dedent -from uwtools.api.config import realize +from uwtools.api.config import get_nml_config, realize from python_utils import ( cfg_to_yaml_str, import_vars, - load_shell_config, + load_yaml_config, print_input_args, print_info_msg, ) @@ -112,7 +112,7 @@ def set_fv3nml_ens_stoch_seeds(cdate, expt_config): input_format="nml", output_file=fv3_nml_ensmem_fp, output_format="nml", - supplemental_configs=[settings], + update_config=get_nml_config(settings), ) def parse_args(argv): @@ -142,5 +142,5 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) set_fv3nml_ens_stoch_seeds(args.cdate, cfg) diff --git a/ush/set_fv3nml_sfc_climo_filenames.py b/ush/set_fv3nml_sfc_climo_filenames.py index 417aa0b5ee..7251a5b0e6 100644 --- a/ush/set_fv3nml_sfc_climo_filenames.py +++ b/ush/set_fv3nml_sfc_climo_filenames.py @@ -10,14 +10,14 @@ import sys from textwrap import dedent -from uwtools.api.config import get_yaml_config, realize +from uwtools.api.config import get_nml_config, get_yaml_config, realize from python_utils import ( cfg_to_yaml_str, check_var_valid_value, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, print_info_msg, ) @@ -105,7 +105,7 @@ def set_fv3nml_sfc_climo_filenames(config, debug=False): input_format="nml", output_file=FV3_NML_FP, output_format="nml", - supplemental_configs=[settings], + update_config=get_nml_config(settings), ) def parse_args(argv): @@ -127,6 +127,6 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) set_fv3nml_sfc_climo_filenames(cfg, args.debug) diff --git a/ush/setup.py b/ush/setup.py index 0511653fa2..51d5b2a084 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -10,10 +10,12 @@ from textwrap import dedent import yaml +from uwtools.api.config import get_yaml_config from python_utils import ( log_info, cd_vrfy, + date_to_str, mkdir_vrfy, rm_vrfy, check_var_valid_value, @@ -1499,10 +1501,13 @@ def dict_find(user_dict, substring): yaml.Dumper.ignore_aliases = lambda *args : True yaml.dump(expt_config.get("rocoto"), f, sort_keys=False) - var_defns_cfg = copy.deepcopy(expt_config) + var_defns_cfg = 
get_yaml_config(config=expt_config) del var_defns_cfg["rocoto"] - with open(global_var_defns_fp, "a") as f: - f.write(cfg_to_shell_str(var_defns_cfg)) + + # Fixup a couple of data types: + for dates in ("DATE_FIRST_CYCL", "DATE_LAST_CYCL"): + var_defns_cfg["workflow"][dates] = date_to_str(var_defns_cfg["workflow"][dates]) + var_defns_cfg.dump(global_var_defns_fp) # diff --git a/ush/source_util_funcs.sh b/ush/source_util_funcs.sh index 7fe3025d6a..9feceaf68e 100644 --- a/ush/source_util_funcs.sh +++ b/ush/source_util_funcs.sh @@ -220,15 +220,15 @@ function source_util_funcs() { #----------------------------------------------------------------------- # . ${bashutils_dir}/eval_METplus_timestr_tmpl.sh + # #----------------------------------------------------------------------- # -# Source the file containing the function that sources config files. +# Source the file that sources YAML files as if they were bash # #----------------------------------------------------------------------- # - . ${bashutils_dir}/source_config.sh - + . ${bashutils_dir}/source_yaml.sh } source_util_funcs diff --git a/ush/update_input_nml.py b/ush/update_input_nml.py index e975d9bc08..b85bbacd4a 100644 --- a/ush/update_input_nml.py +++ b/ush/update_input_nml.py @@ -9,7 +9,7 @@ import sys from textwrap import dedent -from uwtools.api.config import realize +from uwtools.api.config import get_nml_config, realize from python_utils import ( print_input_args, @@ -77,7 +77,7 @@ def update_input_nml(namelist, restart, aqm_na_13km): input_format="nml", output_file=namelist, output_format="nml", - supplemental_configs=[settings], + update_config=get_nml_config(settings), ) def parse_args(argv): diff --git a/ush/wrappers/run_fcst.sh b/ush/wrappers/run_fcst.sh index 7450de7cc5..c875cb16c0 100755 --- a/ush/wrappers/run_fcst.sh +++ b/ush/wrappers/run_fcst.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2} diff --git a/ush/wrappers/run_get_ics.sh b/ush/wrappers/run_get_ics.sh index 0ee521a67d..494eab6850 100755 --- a/ush/wrappers/run_get_ics.sh +++ b/ush/wrappers/run_get_ics.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow task_get_extrn_ics ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2} diff --git a/ush/wrappers/run_get_lbcs.sh b/ush/wrappers/run_get_lbcs.sh index 543ab6e47d..ec6fa23892 100755 --- a/ush/wrappers/run_get_lbcs.sh +++ b/ush/wrappers/run_get_lbcs.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. 
$USHdir/source_util_funcs.sh +for sect in workflow task_get_extrn_lbcs ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2} diff --git a/ush/wrappers/run_make_grid.sh b/ush/wrappers/run_make_grid.sh index 2d55beaf94..f7a6f8aeed 100755 --- a/ush/wrappers/run_make_grid.sh +++ b/ush/wrappers/run_make_grid.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} diff --git a/ush/wrappers/run_make_ics.sh b/ush/wrappers/run_make_ics.sh index 5c629722fc..adcdc16180 100755 --- a/ush/wrappers/run_make_ics.sh +++ b/ush/wrappers/run_make_ics.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2} diff --git a/ush/wrappers/run_make_lbcs.sh b/ush/wrappers/run_make_lbcs.sh index 27c94c127f..f9fe35d9da 100755 --- a/ush/wrappers/run_make_lbcs.sh +++ b/ush/wrappers/run_make_lbcs.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2} diff --git a/ush/wrappers/run_make_orog.sh b/ush/wrappers/run_make_orog.sh index 5f02ff9599..ebc5259ec1 100755 --- a/ush/wrappers/run_make_orog.sh +++ b/ush/wrappers/run_make_orog.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} diff --git a/ush/wrappers/run_make_sfc_climo.sh b/ush/wrappers/run_make_sfc_climo.sh index fab33f75d6..8024f529fc 100755 --- a/ush/wrappers/run_make_sfc_climo.sh +++ b/ush/wrappers/run_make_sfc_climo.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. $USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} diff --git a/ush/wrappers/run_post.sh b/ush/wrappers/run_post.sh index 46ef104365..ca060acb1f 100755 --- a/ush/wrappers/run_post.sh +++ b/ush/wrappers/run_post.sh @@ -1,7 +1,10 @@ #!/usr/bin/env bash -export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh" +export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml" +. 
$USHdir/source_util_funcs.sh +for sect in workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done set -xa -source ${GLOBAL_VAR_DEFNS_FP} export CDATE=${DATE_FIRST_CYCL} export CYCLE_DIR=${EXPTDIR}/${CDATE} export cyc=${DATE_FIRST_CYCL:8:2} From 0a933381aa2159cb073e382b113b3e506b518a05 Mon Sep 17 00:00:00 2001 From: EdwardSnyder-NOAA <96196752+EdwardSnyder-NOAA@users.noreply.github.com> Date: Tue, 30 Jul 2024 09:15:29 -0500 Subject: [PATCH 37/42] [develop] Fixes for PW Jenkins Nightly Builds (#1091) * Adds logic to handle GCP's default conda env, which conflicts with the SRW App's conda env. Fixes a Parallel Works naming convention bug in the srw_metric.sh script. * It also addresses a known issue with a Ruby warning on PW instances that prevents run_WE2E_tests.py from exiting gracefully. The solution we use in our bootstrap for /contrib doesn't seem to work for the /lustre directory, which is why the warning is hardcoded into the monitor_jobs.py script. * The new spack-stack build on Azure is missing a GNU library, so the path to this missing library was added to the proper run scripts, and the wflow noaacloud lua file was cleaned up. * Removed log and error files from the qsub wrapper script so that qsub can generate these files with the job ID in the file names. Also, fixed a typo in the wrapper script. --- .cicd/Jenkinsfile | 6 +++--- .cicd/scripts/qsub_srw_ftest.sh | 2 -- .cicd/scripts/srw_ftest.sh | 5 +++++ .cicd/scripts/srw_metric.sh | 12 +++++++----- .cicd/scripts/wrapper_srw_ftest.sh | 2 +- tests/WE2E/utils.py | 2 +- 6 files changed, 17 insertions(+), 12 deletions(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index 1c92a1bd65..030661bd27 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -235,8 +235,6 @@ pipeline { sh "SRW_WE2E_COMPREHENSIVE_TESTS=${run_we2e_comprehensive_tests} SRW_WE2E_SINGLE_TEST=${single_test}" + ' bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_test.sh"' - // Archive the test log files - sh "[[ -d ${SRW_WE2E_EXPERIMENT_BASE_DIR} ]] && cd ${SRW_WE2E_EXPERIMENT_BASE_DIR} && tar --create --gzip --verbose --dereference --file ${WORKSPACE}/${SRW_PLATFORM}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz */log.generate_FV3LAM_wflow */log/* ${WORKSPACE}/${SRW_PLATFORM}/tests/WE2E/WE2E_tests_*yaml WE2E_summary*txt ${WORKSPACE}/${SRW_PLATFORM}/tests/WE2E/log.* || cat /dev/null > ${WORKSPACE}/${SRW_PLATFORM}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz" } sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"' } } post { success { s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE',
profileName: 'main', userMetadata: [] s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*-skill-score.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } always { + // Archive the test log files + sh "[[ -d ${SRW_WE2E_EXPERIMENT_BASE_DIR} ]] && cd ${SRW_WE2E_EXPERIMENT_BASE_DIR} && tar --create --gzip --verbose --dereference --file ${env.WORKSPACE}/${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz */log.generate_FV3LAM_wflow */log/* ${env.WORKSPACE}/${env.SRW_PLATFORM}/tests/WE2E/WE2E_tests_*yaml WE2E_summary*txt ${env.WORKSPACE}/${env.SRW_PLATFORM}/tests/WE2E/log.* || cat /dev/null > ${env.WORKSPACE}/${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz" + s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] // Remove the data sets from the experiments directory to conserve disk space diff --git a/.cicd/scripts/qsub_srw_ftest.sh b/.cicd/scripts/qsub_srw_ftest.sh index e9f0170a05..8b2569ca69 100644 --- a/.cicd/scripts/qsub_srw_ftest.sh +++ 
b/.cicd/scripts/qsub_srw_ftest.sh @@ -9,7 +9,5 @@ #PBS -l select=1:ncpus=24:mpiprocs=24:ompthreads=1 #PBS -l walltime=00:30:00 #PBS -V -#PBS -o log_wrap.%j.log -#PBS -e err_wrap.%j.err bash ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_ftest.sh diff --git a/.cicd/scripts/srw_ftest.sh b/.cicd/scripts/srw_ftest.sh index b77ee767f3..95530a89aa 100755 --- a/.cicd/scripts/srw_ftest.sh +++ b/.cicd/scripts/srw_ftest.sh @@ -66,6 +66,9 @@ sed "s|^workflow:|workflow:\n EXEC_SUBDIR: ${workspace}/install_${SRW_COMPILER} # Decrease forecast length since we are running all the steps sed "s|^ FCST_LEN_HRS: 12| FCST_LEN_HRS: 6|g" -i ush/config.yaml +# Update compiler +sed "s|^ COMPILER: intel| COMPILER: ${SRW_COMPILER}|g" -i ush/config.yaml + # DATA_LOCATION differs on each platform ... find it. export DATA_LOCATION=$(grep TEST_EXTRN_MDL_SOURCE_BASEDIR ${workspace}/ush/machine/${platform,,}.yaml | awk '{printf "%s", $2}') echo "DATA_LOCATION=${DATA_LOCATION}" @@ -85,6 +88,8 @@ source etc/lmod-setup.sh ${platform,,} module use modulefiles module load build_${platform,,}_${SRW_COMPILER} module load wflow_${platform,,} +# Deactivate conflicting conda env on GCP +[[ "${SRW_PLATFORM}" =~ "gclusternoaa" ]] && conda deactivate [[ ${FORGIVE_CONDA} == true ]] && set +e +u # Some platforms have incomplete python3 or conda support, but wouldn't necessarily block workflow tests conda activate srw_app diff --git a/.cicd/scripts/srw_metric.sh b/.cicd/scripts/srw_metric.sh index e645a2c916..aec28c2253 100755 --- a/.cicd/scripts/srw_metric.sh +++ b/.cicd/scripts/srw_metric.sh @@ -78,6 +78,8 @@ cd ${workspace} # Activate workflow environment module load wflow_${platform,,} +# Deactivate conflicting conda env on GCP +[[ "${SRW_PLATFORM}" =~ "gclusternoaa" ]] && conda deactivate [[ ${FORGIVE_CONDA} == true ]] && set +e +u # Some platforms have incomplete python3 or conda support, but would not necessarily block workflow tests conda activate srw_app @@ -98,7 +100,7 @@ if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then # Clear out data rm -rf ${workspace}/Indy-Severe-Weather/ # Check if metprd data exists locally otherwise get it from S3 - TEST_EXTRN_MDL_SOURCE_BASEDIR=$(grep TEST_EXTRN_MDL_SOURCE_BASEDIR ${workspace}/ush/machine/${SRW_PLATFORM}.yaml | awk '{print $NF}') + TEST_EXTRN_MDL_SOURCE_BASEDIR=$(grep TEST_EXTRN_MDL_SOURCE_BASEDIR ${workspace}/ush/machine/${platform}.yaml | awk '{print $NF}') if [[ -d $(dirname ${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat ]] ; then mkdir -p Indy-Severe-Weather/metprd/point_stat cp -rp $(dirname ${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat Indy-Severe-Weather/metprd @@ -108,7 +110,7 @@ if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then wget https://noaa-ufs-srw-pds.s3.amazonaws.com/sample_cases/release-public-v2.1.0/Indy-Severe-Weather.tgz tar xvfz Indy-Severe-Weather.tgz fi - [[ -f ${platform,,}-${srw_compiler}-skill-score.txt ]] && rm ${platform,,}-${srw_compiler}-skill-score.txt + [[ -f ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt ]] && rm ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt # Skill score index is computed over several terms that are defined in parm/metplus/STATAnalysisConfig_skill_score. # It is computed by aggregating the output from earlier runs of the Point-Stat and/or Grid-Stat tools over one or more cases. 
# In this example, skill score index is a weighted average of 4 skill scores of RMSE statistics for wind speed, dew point temperature, @@ -126,15 +128,15 @@ if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then sed -i 's|--load("conda")|load("conda")|g' ${workspace}/modulefiles/tasks/${platform,,}/run_vx.local.lua fi # Run stat_analysis - stat_analysis -config parm/metplus/STATAnalysisConfig_skill_score -lookin ${workspace}/Indy-Severe-Weather/metprd/point_stat -v 2 -out ${platform,,}-${srw_compiler}-skill-score.txt + stat_analysis -config parm/metplus/STATAnalysisConfig_skill_score -lookin ${workspace}/Indy-Severe-Weather/metprd/point_stat -v 2 -out ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt # check skill-score.txt - cat ${platform,,}-${srw_compiler}-skill-score.txt + cat ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt # get skill-score (SS_INDEX) and check if it is significantly smaller than 1.0 # A value greater than 1.0 indicates that the forecast model outperforms the reference, # while a value less than 1.0 indicates that the reference outperforms the forecast. - tmp_string=$( tail -2 ${platform,,}-${srw_compiler}-skill-score.txt | head -1 ) + tmp_string=$( tail -2 ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt | head -1 ) SS_INDEX=$(echo $tmp_string | awk -F " " '{print $NF}') echo "Skill Score: ${SS_INDEX}" if [[ ${SS_INDEX} < "0.700" ]]; then diff --git a/.cicd/scripts/wrapper_srw_ftest.sh b/.cicd/scripts/wrapper_srw_ftest.sh index ee26edadaf..33fd966efa 100755 --- a/.cicd/scripts/wrapper_srw_ftest.sh +++ b/.cicd/scripts/wrapper_srw_ftest.sh @@ -67,7 +67,7 @@ do # Return exit code and check for results file first results_file="${WORKSPACE}/${SRW_PLATFORM}/functional_test_results_${SRW_PLATFORM}_${SRW_COMPILER}.txt" if [ ! -f "$results_file" ]; then - echo "Missing results file! \nexit 1" + echo -e "Missing results file! \nexit 1" exit 1 fi diff --git a/tests/WE2E/utils.py b/tests/WE2E/utils.py index eb3c49fcba..0e6629ad17 100755 --- a/tests/WE2E/utils.py +++ b/tests/WE2E/utils.py @@ -530,7 +530,7 @@ def compare_rocotostat(expt_dict,name): continue line_array = line.split() # Skip header lines - if line_array[0] == 'CYCLE': + if line_array[0] == 'CYCLE' or line_array[0] == '/apps/rocoto/1.3.3/lib/workflowmgr/launchserver.rb:40:': continue # We should now just have lines describing jobs, in the form: # line_array = ['cycle','task','jobid','status','exit status','num tries','walltime'] From dc2310cdd1b37f74b4978c8320b9832c1887b708 Mon Sep 17 00:00:00 2001 From: EdwardSnyder-NOAA <96196752+EdwardSnyder-NOAA@users.noreply.github.com> Date: Thu, 1 Aug 2024 11:38:19 -0500 Subject: [PATCH 38/42] [develop] S3 doc updates (#1104) As part of the data governance initiative, all S3 buckets need some form of version control. To meet this need, the AWS S3 bucket was reorganized: the develop data is now stored under a 'develop-date' folder, and the verification sample case and the documentation case (current_release_data) were moved under a new folder called 'experiment-user-cases'.
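For users with scripts or bookmarks that still point at the old flat layout, the change amounts to a path remapping. A minimal sketch using the fix-file tarball (both URLs are taken from the documentation updates in this patch; substitute the release tag that matches the data being fetched):

# old, flat layout
wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/fix_data.tgz
# new, versioned layout under experiment-user-cases
wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/fix_data.tgz
tar -xzf fix_data.tgz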
--------- Co-authored-by: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com> --- .cicd/scripts/srw_metric.sh | 2 +- doc/UsersGuide/BuildingRunningTesting/AQM.rst | 2 +- .../ContainerQuickstart.rst | 6 +- .../BuildingRunningTesting/RunSRW.rst | 2 +- .../BuildingRunningTesting/Tutorial.rst | 6 +- .../BuildingRunningTesting/VXCases.rst | 8 +- .../InputOutputFiles.rst | 10 +- doc/tables/fix_file_list.rst | 1514 ++++++++--------- 8 files changed, 775 insertions(+), 775 deletions(-) diff --git a/.cicd/scripts/srw_metric.sh b/.cicd/scripts/srw_metric.sh index aec28c2253..8f6eed85b0 100755 --- a/.cicd/scripts/srw_metric.sh +++ b/.cicd/scripts/srw_metric.sh @@ -107,7 +107,7 @@ if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then elif [[ -f Indy-Severe-Weather.tgz ]]; then tar xvfz Indy-Severe-Weather.tgz else - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/sample_cases/release-public-v2.1.0/Indy-Severe-Weather.tgz + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.1.0/METplus-vx-sample/Indy-Severe-Weather.tgz tar xvfz Indy-Severe-Weather.tgz fi [[ -f ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt ]] && rm ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt diff --git a/doc/UsersGuide/BuildingRunningTesting/AQM.rst b/doc/UsersGuide/BuildingRunningTesting/AQM.rst index 6d2ae0f193..7186de6618 100644 --- a/doc/UsersGuide/BuildingRunningTesting/AQM.rst +++ b/doc/UsersGuide/BuildingRunningTesting/AQM.rst @@ -123,7 +123,7 @@ The community AQM configuration assumes that users have :term:`HPSS` access and USE_USER_STAGED_EXTRN_FILES: true EXTRN_MDL_SOURCE_BASEDIR_LBCS: /path/to/data -On Level 1 systems, users can find :term:`ICs/LBCs` in the usual :ref:`input data locations ` under ``FV3GFS/netcdf/2023021700`` and ``FV3GFS/netcdf/2023021706``. Users can also download the data required for the community experiment from the `UFS SRW App Data Bucket `__. +On Level 1 systems, users can find :term:`ICs/LBCs` in the usual :ref:`input data locations ` under ``FV3GFS/netcdf/2023021700`` and ``FV3GFS/netcdf/2023021706``. Users can also download the data required for the community experiment from the `UFS SRW App Data Bucket `__. Users may also wish to change :term:`cron`-related parameters in ``config.yaml``. In the ``config.aqm.community.yaml`` file, which was copied into ``config.yaml``, cron is used for automatic submission and resubmission of the workflow: diff --git a/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst b/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst index 9e4f58f0bd..d9dd1a0afc 100644 --- a/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst +++ b/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst @@ -188,8 +188,8 @@ The SRW App requires input files to run. These include static datasets, initial .. code-block:: console - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/fix_data.tgz - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/gst_data.tgz + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/fix_data.tgz + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/gst_data.tgz tar -xzf fix_data.tgz tar -xzf gst_data.tgz @@ -439,4 +439,4 @@ If users have the PBS resource manager installed on their system, the allocation For more information on the ``qsub`` command options, see the `PBS Manual §2.59.3 `__, (p. 1416). 
-These commands should output a hostname. Users can then run ``ssh ``. After "ssh-ing" to the compute node, they can run the container from that node. To run larger experiments, it may be necessary to allocate multiple compute nodes. \ No newline at end of file +These commands should output a hostname. Users can then run ``ssh ``. After "ssh-ing" to the compute node, they can run the container from that node. To run larger experiments, it may be necessary to allocate multiple compute nodes. diff --git a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst index 9d42aaf0dc..d7fd7407a8 100644 --- a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst +++ b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst @@ -584,7 +584,7 @@ the same cycle starting date/time and forecast hours. Other parameters may diffe Cartopy Shapefiles ````````````````````` -The Python plotting tasks require a path to the directory where the Cartopy Natural Earth shapefiles are located. The medium scale (1:50m) cultural and physical shapefiles are used to create coastlines and other geopolitical borders on the map. On :srw-wiki:`Level 1 ` systems, this path is already set in the system's machine file using the variable ``FIXshp``. Users on other systems will need to download the shapefiles and update the path of ``$FIXshp`` in the machine file they are using (e.g., ``$SRW/ush/machine/macos.yaml`` for a generic MacOS system, where ``$SRW`` is the path to the ``ufs-srweather-app`` directory). The subset of shapefiles required for the plotting task can be obtained from the `SRW Data Bucket `__. The full set of medium-scale (1:50m) Cartopy shapefiles can be downloaded `here `__. +The Python plotting tasks require a path to the directory where the Cartopy Natural Earth shapefiles are located. The medium scale (1:50m) cultural and physical shapefiles are used to create coastlines and other geopolitical borders on the map. On :srw-wiki:`Level 1 ` systems, this path is already set in the system's machine file using the variable ``FIXshp``. Users on other systems will need to download the shapefiles and update the path of ``$FIXshp`` in the machine file they are using (e.g., ``$SRW/ush/machine/macos.yaml`` for a generic MacOS system, where ``$SRW`` is the path to the ``ufs-srweather-app`` directory). The subset of shapefiles required for the plotting task can be obtained from the `SRW Data Bucket `__. The full set of medium-scale (1:50m) Cartopy shapefiles can be downloaded `here `__. 
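For users staging the shapefiles themselves, a minimal sketch of that download step (the URL is the develop-20240618 bucket path referenced in the VXCases update below; ``FIXshp`` in the machine file should then point at the unpacked directory):

# fetch and unpack the NaturalEarth shapefiles, then set FIXshp accordingly
wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/NaturalEarth/NaturalEarth.tgz
tar -xzf NaturalEarth.tgz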
Task Configuration ````````````````````` diff --git a/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst b/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst index 445dee1b8f..a21b7aa9bd 100644 --- a/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst +++ b/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst @@ -45,12 +45,12 @@ On :srw-wiki:`Level 1 ` systems, users can fi * FV3GFS data for the first forecast (``control``) is located at: - * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#input_model_data/FV3GFS/grib2/2019061518/ + * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#develop-20240618/input_model_data/FV3GFS/grib2/2019061518/ * HRRR and RAP data for the second forecast (``test_expt``) is located at: - * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#input_model_data/HRRR/2019061518/ - * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#input_model_data/RAP/2019061518/ + * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#develop-20240618/input_model_data/HRRR/2019061518/ + * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#develop-20240618/input_model_data/RAP/2019061518/ Load the Workflow -------------------- diff --git a/doc/UsersGuide/BuildingRunningTesting/VXCases.rst b/doc/UsersGuide/BuildingRunningTesting/VXCases.rst index 080e180b14..2bf6f775d0 100644 --- a/doc/UsersGuide/BuildingRunningTesting/VXCases.rst +++ b/doc/UsersGuide/BuildingRunningTesting/VXCases.rst @@ -45,21 +45,21 @@ On :srw-wiki:`Level 1 ` systems, users can fi On other systems, users need to download the ``Indy-Severe-Weather.tgz`` file using any of the following methods: - #. Download directly from the S3 bucket using a browser. The data is available at https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#sample_cases/release-public-v2.2.0/. + #. Download directly from the S3 bucket using a browser. The data is available at https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#experiment-user-cases/release-public-v2.2.0/METplus-vx-sample/. #. Download from a terminal using the AWS command line interface (CLI), if installed: .. code-block:: console - aws s3 cp https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#sample_cases/release-public-v2.2.0/Indy-Severe-Weather.tgz Indy-Severe-Weather.tgz + aws s3 cp https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#experiment-user-cases/release-public-v2.2.0/METplus-vx-sample/Indy-Severe-Weather.tgz Indy-Severe-Weather.tgz #. Download from a terminal using ``wget``: .. code-block:: console - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/sample_cases/release-public-v2.2.0/Indy-Severe-Weather.tgz + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/METplus-vx-sample/Indy-Severe-Weather.tgz -This tar file contains :term:`IC/LBC ` files, observation data, model/forecast output, and MET verification output for the sample forecast. Users who have never run the SRW App on their system before will also need to download (1) the fix files required for SRW App forecasts and (2) the NaturalEarth shapefiles required for plotting. Users can download the fix file data from a browser at https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/fix_data.tgz or visit :numref:`Section %s ` for instructions on how to download the data with ``wget``. NaturalEarth files are available at https://noaa-ufs-srw-pds.s3.amazonaws.com/NaturalEarth/NaturalEarth.tgz. See the :numref:`Section %s ` for more information on plotting. 
diff --git a/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst b/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst
index 128b080655..40227d7a2b 100644
--- a/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst
+++ b/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst
@@ -225,14 +225,14 @@ A set of input files, including static (fix) data and raw initial and lateral bo
 Static Files
 --------------
-Static files are available in the `"fix" directory `__ of the SRW App Data Bucket. Users can download the full set of fix files as a tar file:
+Static files are available in the `"fix" directory `__ of the SRW App Data Bucket. Users can download the full set of fix files as a tar file:
 .. code-block:: console
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/fix_data.tgz
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/fix_data.tgz
    tar -xzf fix_data.tgz
-Alternatively, users can download the static files individually from the `"fix" directory `__ of the SRW Data Bucket using the ``wget`` command for each required file. Users will need to create an appropriate directory structure for the files when downloading them individually. The best solution is to download the files into directories that mirror the structure of the `Data Bucket `__.
+Alternatively, users can download the static files individually from the `"fix" directory `__ of the SRW Data Bucket using the ``wget`` command for each required file. Users will need to create an appropriate directory structure for the files when downloading them individually. The best solution is to download the files into directories that mirror the structure of the `Data Bucket `__.
 The environment variables ``FIXgsm``, ``FIXorg``, and ``FIXsfc`` indicate the path to the directories where the static files are located. After downloading the experiment data, users must set the paths to the files in ``config.yaml``. Add the following code to the ``task_run_fcst:`` section of the ``config.yaml`` file, and alter the variable paths accordingly:
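A minimal sketch of that addition follows; the specific ``fix`` subdirectory names below are assumptions based on the data bucket layout, and the leading paths are placeholders for the actual download locations:

.. code-block:: yaml

   # In config.yaml -- point these at the directories created by untarring fix_data.tgz
   task_run_fcst:
     FIXgsm: /path/to/fix/fix_am
     FIXorg: /path/to/fix/fix_orog
     FIXsfc: /path/to/fix/fix_sfc_climo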
@@ -252,7 +252,7 @@ To download the model input data for the 12-hour "out-of-the-box" experiment con
 .. code-block:: console
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/gst_data.tgz
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/gst_data.tgz
    tar -xzf gst_data.tgz
 To download data for different dates, model types, and formats, users can explore the ``input_model_data`` section of the data bucket and replace the links above with ones that fetch their desired data.
@@ -318,7 +318,7 @@ Default Initial and Lateral Boundary Conditions
 -----------------------------------------------
 The default initial and lateral boundary condition files are set to be a severe weather case from June 15, 2019 (20190615) at 18 UTC. FV3GFS GRIB2 files are the default model and file format. A tar file
-(``gst_data.tgz``) containing the model data for this case is available in the `UFS SRW App Data Bucket `__.
+(``gst_data.tgz``) containing the model data for this case is available in the `UFS SRW App Data Bucket `__.
 Running the App for Different Dates
 -----------------------------------
diff --git a/doc/tables/fix_file_list.rst b/doc/tables/fix_file_list.rst
index a20bd39245..628c124bc3 100644
--- a/doc/tables/fix_file_list.rst
+++ b/doc/tables/fix_file_list.rst
@@ -11,599 +11,599 @@ Static Files for SRW App Release v2.1.0
 .. code-block:: console
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m01.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m02.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m03.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m04.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m05.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m06.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m07.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m08.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m09.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m10.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m11.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m12.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m01.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m02.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m03.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m04.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m05.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m06.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m07.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m08.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m09.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m10.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m11.nc
-   wget
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m12.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m01.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m02.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m03.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m04.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m05.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m06.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m07.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m08.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m09.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m10.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m11.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m12.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m01.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m02.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m03.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m04.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m05.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m06.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m07.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m08.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m09.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m10.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m11.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m12.nc ``fix_am`` Files --------------------- .. 
code-block:: console - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CCN_ACTIVATE.BIN - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/cfs_ice1x1monclim19822001.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/cfs_oi2sst1x1monclim19822001.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/cfs_v2_soilmcpc.1x1.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CFSR.OISST.1982.2010.monthly.clim - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CFSR.OISST.1999.2012.monthly.clim.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CFSR.SEAICE.1982.2010.monthly.clim - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CFSR.SEAICE.1982.2012.monthly.clim.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2monthlycyc.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/emcsfc_gland5min.grib2 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/emcsfc_snow_cover.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/emcsfc_snow_cover_climo.grib2 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/freezeH2O.dat - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/geo_em.d01.lat-lon.2.5m.HGT_M.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/geo_em.d01.nc_HRRR_AK - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/geo_em.d01.nc_HRRRX - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/geo_em.d01.nc_RAPX - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_1x1_paramlist - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_1x1_paramlist.anl - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_1x1_paramlist.f00 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeroinfo.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m01.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m02.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m03.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m04.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m05.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m06.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m07.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m08.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m09.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m10.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m11.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m12.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_albedo4.1x1.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_cldtune.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_climaeropac_global.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2con.l28.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2con.l42.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2con.l64.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1956.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1957.txt - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1958.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1959.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1960.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1961.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1962.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1963.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1964.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1965.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1966.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1967.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1968.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1969.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1970.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1971.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1972.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1973.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1974.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1975.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1976.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1977.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1978.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1979.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1980.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1981.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1982.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1983.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1984.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1985.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1986.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1987.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1988.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1989.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1990.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1991.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1992.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1993.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1994.txt - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1995.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1996.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1997.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1998.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1999.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2000.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2001.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2002.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2003.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2004.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2005.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2006.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2007.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2008.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2009.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2010.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2011.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2012.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2013.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_glob.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2monthlycyc1976_2006.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2monthlycyc1976_2007.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2monthlycyc1976_2009.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_divten.l28.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_divten.l42.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_divten.l64.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_emissivity_coefs.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t1148.2304.1152.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t1534.3072.1536.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t574.1152.576.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t670.1344.672.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t766.1536.768.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t94.192.96.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_glacier.2x2.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_h2o_pltc.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hd_paramlist - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hd_paramlist.f00 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l128.txt - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l128C.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l150.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l28.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l42.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l60.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l64.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l64sl.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l65.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l65.txt_0.1hPa - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l91.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l98.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev3.l28.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev3.l42.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev3.l60.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev3.l64.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_iceclim.2x2.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_hflux.dat - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lflux.dat - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lte.150 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lte.360 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lte.540 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lte.720 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_ggww_in1.par - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_ggww_in4.par - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_h2ort_kg7t.par - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_h2ovb_kg7t.par - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_wei96.cofcnts - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_kplist.1d.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_kplist.hd.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_kplist.master.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t1534.3072.1536.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t382.768.384.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t670.1344.672.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t878.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t1534.3072.1536.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t670.1344.672.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t878.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t1148.2304.1152.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t126.384.190.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t1534.3072.1536.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t170.512.256.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t190.384.192.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t190.576.288.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t254.512.256.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t254.768.384.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t3070.6144.3072.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t382.1152.576.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t382.768.384.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t574.1152.576.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t574.1760.880.txt - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t62.192.94.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t670.1344.672.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t766.1536.768.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t878.1760.880.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t878.2640.1320.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t92.192.94.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t94.192.96.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_maskh.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_master-catchup_parmlist - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_maxice.2x2.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t1148.2304.1152.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t126.384.190.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t126.384.190.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t1534.3072.1536.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t1534.3072.1536.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t170.512.256.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t190.384.192.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t190.384.192.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t190.576.288.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t254.512.256.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t254.512.256.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t254.768.384.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t382.1152.576.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t382.768.384.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t382.768.384.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t574.1152.576.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t574.1152.576.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t574.1760.880.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t62.192.94.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t670.1344.672.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t670.1344.672.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t766.1536.768.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t878.1760.880.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t878.2640.1320.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t92.192.94.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t92.192.94.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t126.384.190.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t126.384.190.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t1534.3072.1536.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t1534.3072.1536.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t190.384.192.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t190.576.288.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t254.512.256.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t382.768.384.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t574.1152.576.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t62.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t670.1344.672.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t670.1344.672.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t766.1536.768.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t766.1536.768.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t878.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t92.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t94.192.96.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_npoess_paramlist - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_o3clim.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_o3prdlos.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t126.384.190.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t126.384.190.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t1534.3072.1536.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t1534.3072.1536.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t1534.3072.1536.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t190.384.192.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t190.384.192.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t254.512.256.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t254.512.256.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t382.768.384.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t382.768.384.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t574.1152.576.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t574.1152.576.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t62.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t670.1344.672.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t670.1344.672.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t670.1344.672.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t766.1536.768.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t766.1536.768.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t878.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t92.192.94.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t92.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t126.384.190.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t126.384.190.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t1534.3072.1536.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t1534.3072.1536.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t1534.3072.1536.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t190.384.192.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t190.384.192.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t254.512.256.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t254.512.256.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t382.768.384.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t382.768.384.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t574.1152.576.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t574.1152.576.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t62.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t670.1344.672.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t670.1344.672.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t670.1344.672.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t766.1536.768.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t766.1536.768.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t878.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t92.192.94.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t92.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_0.5x0.5.dat - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_salclm.t1534.3072.1536.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_sfc_emissivity_idx.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_shdmax.0.144x0.144.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_shdmax.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_shdmin.0.144x0.144.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_shdmin.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_siglevel.l28.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_siglevel.l42.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_siglevel.l64.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t126.384.190.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t126.384.190.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t1534.3072.1536.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t1534.3072.1536.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t1534.3072.1536.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t190.384.192.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t190.384.192.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t254.512.256.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t254.512.256.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t382.768.384.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t382.768.384.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t574.1152.576.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t574.1152.576.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t62.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t670.1344.672.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t670.1344.672.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t670.1344.672.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t766.1536.768.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t766.1536.768.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t766.1536.768.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t878.1760.880.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t92.192.94.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t92.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slope.1x1.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slptyp.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snoalb.1x1.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snoalb.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snoclim.1.875.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t126.384.190.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t1534.3072.1536.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t1534.3072.1536.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t190.384.192.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t190.576.288.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t254.512.256.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t382.768.384.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t574.1152.576.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t62.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t670.1344.672.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t670.1344.672.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t766.1536.768.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t766.1536.768.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t878.1760.880.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t92.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t94.192.96.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmcpc.1x1.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t1534.3072.1536.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t766.1536.768.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t94.192.96.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t1534.3072.1536.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t190.576.288.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t670.1344.672.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t766.1536.768.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t878.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.1x1.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t126.384.190.rg.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t1534.3072.1536.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t190.384.192.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t190.576.288.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t254.512.256.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t382.768.384.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t574.1152.576.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t62.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t670.1344.672.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t766.1536.768.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t766.1536.768.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t92.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t94.192.96.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_cmip_an.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_cmip_mn.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_noaa_a0.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_noaa_an.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_noaa_an.txt_v2011
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_noaa_an.txt_v2019
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstantdata.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_spectral_coefs.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_sstclim.2x2.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_tbthe.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_tg3clim.2.6x1.5.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_transmittance_coefs.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vars.l28.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vars.l42.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vars.l64.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegfrac.0.144.decpercent.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegfrac.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t1148.2304.1152.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t126.384.190.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t126.384.190.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t1534.3072.1536.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t1534.3072.1536.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t170.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t190.384.192.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t190.384.192.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t190.576.288.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t190.576.288.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t254.512.256.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t254.512.256.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t254.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t382.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t382.768.384.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t382.768.384.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t574.1152.576.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t574.1152.576.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t574.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t62.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t62.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t670.1344.672.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t670.1344.672.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t766.1536.768.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t766.1536.768.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t878.1760.880.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t878.2640.1320.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t92.192.94.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t92.192.94.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t94.192.96.rg.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1850-1859.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1860-1869.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1870-1879.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1880-1889.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1890-1899.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1900-1909.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1910-1919.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1920-1929.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1930-1939.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1940-1949.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1950-1959.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1960-1969.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1970-1979.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1980-1989.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1990-1999.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_zorclim.1x1.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/HGT.Beljaars_filtered.lat-lon.30s_res.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/latlon_grid3.32769.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/ozone.clim
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/qr_acr_qg.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/qr_acr_qgV2.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/qr_acr_qs.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/qr_acr_qsV2.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-cloud-optics-coeffs-lw.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-cloud-optics-coeffs-sw.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-data-lw-g256-2018-12-04.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-data-sw-g224-2018-12-04.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-lw-prototype-g128-210413.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-sw-prototype-g131-210413.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/RTGSST.1982.2012.monthly.clim.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/seaice_newland.grb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_fildef.vit
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_slmask.t126.gaussian
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_stmnames
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_stmnames_old
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_stmnames_old1
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_stmnames_old2
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/Thompson_MP_MONTHLY_CLIMO.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/ugwp_limb_tau.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CCN_ACTIVATE.BIN
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/cfs_ice1x1monclim19822001.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/cfs_oi2sst1x1monclim19822001.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/cfs_v2_soilmcpc.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CFSR.OISST.1982.2010.monthly.clim
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CFSR.OISST.1999.2012.monthly.clim.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CFSR.SEAICE.1982.2010.monthly.clim
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CFSR.SEAICE.1982.2012.monthly.clim.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2monthlycyc.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/emcsfc_gland5min.grib2
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/emcsfc_snow_cover.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/emcsfc_snow_cover_climo.grib2
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/freezeH2O.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/geo_em.d01.lat-lon.2.5m.HGT_M.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/geo_em.d01.nc_HRRR_AK
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/geo_em.d01.nc_HRRRX
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/geo_em.d01.nc_RAPX
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_1x1_paramlist
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_1x1_paramlist.anl
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_1x1_paramlist.f00
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeroinfo.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m01.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m02.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m03.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m04.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m05.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m06.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m07.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m08.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m09.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m10.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m11.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m12.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_albedo4.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_cldtune.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_climaeropac_global.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2con.l28.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2con.l42.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2con.l64.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1956.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1957.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1958.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1959.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1960.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1961.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1962.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1963.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1964.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1965.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1966.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1967.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1968.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1969.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1970.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1971.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1972.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1973.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1974.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1975.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1976.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1977.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1978.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1979.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1980.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1981.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1982.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1983.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1984.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1985.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1986.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1987.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1988.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1989.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1990.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1991.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1992.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1993.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1994.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1995.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1996.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1997.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1998.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1999.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2000.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2001.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2002.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2003.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2004.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2005.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2006.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2007.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2008.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2009.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2010.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2011.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2012.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2013.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_glob.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2monthlycyc1976_2006.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2monthlycyc1976_2007.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2monthlycyc1976_2009.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_divten.l28.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_divten.l42.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_divten.l64.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_emissivity_coefs.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t1148.2304.1152.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t1534.3072.1536.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t574.1152.576.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t670.1344.672.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t766.1536.768.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t94.192.96.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_glacier.2x2.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_h2o_pltc.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hd_paramlist
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hd_paramlist.f00
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l128.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l128C.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l150.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l28.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l42.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l60.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l64.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l64sl.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l65.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l65.txt_0.1hPa
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l91.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l98.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev3.l28.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev3.l42.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev3.l60.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev3.l64.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_iceclim.2x2.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_hflux.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lflux.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lte.150
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lte.360
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lte.540
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lte.720
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_ggww_in1.par
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_ggww_in4.par
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_h2ort_kg7t.par
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_h2ovb_kg7t.par
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_wei96.cofcnts
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_kplist.1d.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_kplist.hd.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_kplist.master.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t1148.2304.1152.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t126.384.190.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t1534.3072.1536.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t170.512.256.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t190.384.192.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t190.576.288.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t254.512.256.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t254.768.384.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t3070.6144.3072.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t382.1152.576.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t382.768.384.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t574.1152.576.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t574.1760.880.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t62.192.94.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t670.1344.672.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t766.1536.768.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t878.1760.880.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t878.2640.1320.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t92.192.94.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t94.192.96.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_maskh.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_master-catchup_parmlist
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_maxice.2x2.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t1148.2304.1152.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t126.384.190.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t126.384.190.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t1534.3072.1536.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t1534.3072.1536.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t170.512.256.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t190.384.192.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t190.384.192.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t190.576.288.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t254.512.256.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t254.512.256.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t254.768.384.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t382.1152.576.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t382.768.384.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t382.768.384.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t574.1152.576.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t574.1152.576.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t574.1760.880.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t62.192.94.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t670.1344.672.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t670.1344.672.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t766.1536.768.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t878.1760.880.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t878.2640.1320.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t92.192.94.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t92.192.94.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t190.576.288.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t94.192.96.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_npoess_paramlist
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_o3clim.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_o3prdlos.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t126.384.190.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t1534.3072.1536.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t190.384.192.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t254.512.256.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t382.768.384.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t574.1152.576.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t670.1344.672.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t766.1536.768.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t92.192.94.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t126.384.190.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t1534.3072.1536.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t190.384.192.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t254.512.256.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t382.768.384.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t574.1152.576.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t670.1344.672.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t766.1536.768.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t92.192.94.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_0.5x0.5.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_salclm.t1534.3072.1536.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_sfc_emissivity_idx.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_shdmax.0.144x0.144.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_shdmax.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_shdmin.0.144x0.144.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_shdmin.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_siglevel.l28.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_siglevel.l42.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_siglevel.l64.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t126.384.190.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t1534.3072.1536.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t190.384.192.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t254.512.256.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t382.768.384.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t574.1152.576.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t670.1344.672.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t766.1536.768.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t92.192.94.rg.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slope.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slptyp.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snoalb.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snoalb.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snoclim.1.875.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t190.576.288.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t94.192.96.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmcpc.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t94.192.96.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t190.576.288.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t190.576.288.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t94.192.96.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_cmip_an.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_cmip_mn.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_noaa_a0.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_noaa_an.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_noaa_an.txt_v2011
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_noaa_an.txt_v2019
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstantdata.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_spectral_coefs.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_sstclim.2x2.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_tbthe.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_tg3clim.2.6x1.5.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_transmittance_coefs.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vars.l28.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vars.l42.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vars.l64.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegfrac.0.144.decpercent.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegfrac.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t1148.2304.1152.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t126.384.190.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t126.384.190.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t1534.3072.1536.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t1534.3072.1536.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t170.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t190.384.192.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t190.384.192.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t190.576.288.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t190.576.288.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t254.512.256.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t254.512.256.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t254.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t382.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t382.768.384.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t382.768.384.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t574.1152.576.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t574.1152.576.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t574.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t62.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t62.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t670.1344.672.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t670.1344.672.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t766.1536.768.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t766.1536.768.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t878.1760.880.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t878.2640.1320.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t92.192.94.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t92.192.94.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t94.192.96.rg.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1850-1859.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1860-1869.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1870-1879.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1880-1889.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1890-1899.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1900-1909.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1910-1919.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1920-1929.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1930-1939.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1940-1949.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1950-1959.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1960-1969.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1970-1979.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1980-1989.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1990-1999.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_zorclim.1x1.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/HGT.Beljaars_filtered.lat-lon.30s_res.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/latlon_grid3.32769.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/ozone.clim
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/qr_acr_qg.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/qr_acr_qgV2.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/qr_acr_qs.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/qr_acr_qsV2.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-cloud-optics-coeffs-lw.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-cloud-optics-coeffs-sw.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-data-lw-g256-2018-12-04.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-data-sw-g224-2018-12-04.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-lw-prototype-g128-210413.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/rrtmgp-sw-prototype-g131-210413.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/RTGSST.1982.2012.monthly.clim.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/seaice_newland.grb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_fildef.vit
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_slmask.t126.gaussian
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_stmnames
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_stmnames_old
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_stmnames_old1
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/syndat_stmnames_old2
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/Thompson_MP_MONTHLY_CLIMO.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/ugwp_limb_tau.nc
 
 ``fix_am/co2dat_4a/`` Files:
 
@@ -611,102 +611,102 @@ Static Files for SRW App Release v2.1.0
 ..
.. code-block:: console

- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1956.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1957.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1958.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1959.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1960.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1961.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1962.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1963.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1964.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1965.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1966.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1967.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1968.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1969.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1970.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1971.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1972.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1973.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1974.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1975.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1976.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1977.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1978.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1979.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1980.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1981.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1982.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1983.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1984.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1985.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1986.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1987.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1988.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1989.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1990.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1991.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1992.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1993.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1994.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1995.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1996.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1997.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1998.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1999.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2000.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2001.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2002.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2003.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2004.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2005.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2006.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2007.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2008.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2021.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2021.txt_proj_u
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2022.txt_proj
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_glob.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2monthlycyc1976_2006.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2monthlycyc1976_2009.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/MEMO
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1956.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1957.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1958.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1959.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1960.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1961.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1962.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1963.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1964.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1965.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1966.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1967.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1968.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1969.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1970.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1971.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1972.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1973.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1974.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1975.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1976.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1977.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1978.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1979.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1980.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1981.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1982.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1983.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1984.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1985.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1986.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1987.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1988.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1989.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1990.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1991.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1992.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1993.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1994.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1995.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1996.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1997.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1998.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_1999.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2000.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2001.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2002.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2003.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2004.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2005.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2006.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2007.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2008.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2021.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2021.txt_proj_u
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2022.txt_proj
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_glob.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2monthlycyc1976_2006.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2monthlycyc1976_2009.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/MEMO

``fix_am/fix_co2_proj`` Files:

@@ -714,20 +714,20 @@ Static Files for SRW App Release v2.1.0
.. code-block:: console

- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2009.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2010.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2011.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2012.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2013.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2014.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2015.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2016.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2017.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2018.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2019.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2020.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2021.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2022.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2009.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2010.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2011.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2012.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2013.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2014.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2015.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2016.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2017.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2018.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2019.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2020.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2021.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2022.txt

``fix_am/fix_co2_update`` Files:

@@ -735,19 +735,19 @@ Static Files for SRW App Release v2.1.0
.. code-block:: console

- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2009.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2010.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2011.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2012.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2013.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2014.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2015.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2016.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2017.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2018.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2019.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2020.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2021.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2009.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2010.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2011.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2012.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2013.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2014.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2015.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2016.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2017.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2018.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2019.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2020.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2021.txt

``fix_lut`` Files

@@ -755,12 +755,12 @@ Static Files for SRW App Release v2.1.0
.. code-block:: console

- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_BC.v1_3.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_DU.v15_3.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_DU.v15_3.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_OC.v1_3.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_SS.v3_3.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_SU.v1_3.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_BC.v1_3.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_DU.v15_3.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_DU.v15_3.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_OC.v1_3.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_SS.v3_3.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_SU.v1_3.dat

``fix_orog`` Files

@@ -768,26 +768,26 @@ Static Files for SRW App Release v2.1.0
.. code-block:: console

- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/clmgrb
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/clmgrb.index
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/convert.f90
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gmted2010.30sec.flt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gmted2010.30sec.int
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gmted2010.30sec.flt.ctl
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gmted2010.30sec.int.ctl
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/thirty.second.antarctic.new.bin
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/GlobalLakeDepth.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/GlobalLakeDepth.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/GlobalLakeStatus.dat
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/GlobalLakeStatus.txt
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gtopo30_gg.fine
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gtopo30_gg.fine.nh
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/landcover30.fixed
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/makefile
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/run.lsf
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/TOP8M_avg.20I4.asc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/TOP8M_max.20I4.asc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/TOP8M_slm.80I1.asc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/clmgrb
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/clmgrb.index
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/convert.f90
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gmted2010.30sec.flt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gmted2010.30sec.int
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gmted2010.30sec.flt.ctl
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gmted2010.30sec.int.ctl
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/thirty.second.antarctic.new.bin
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/GlobalLakeDepth.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/GlobalLakeDepth.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/GlobalLakeStatus.dat
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/GlobalLakeStatus.txt
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gtopo30_gg.fine
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gtopo30_gg.fine.nh
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/landcover30.fixed
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/makefile
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/run.lsf
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/TOP8M_avg.20I4.asc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/TOP8M_max.20I4.asc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/TOP8M_slm.80I1.asc

@@ -796,26 +796,26 @@ Static Files for SRW App Release v2.1.0
.. code-block:: console

- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/facsf.1.0.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.igbp.0.03.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/leaf_area_index.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.igbp.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/maximum_snow_albedo.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.igbp.conus.0.01.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/slope_type.1.0.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.modis.igbp.0.03.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/snowfree_albedo.4comp.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.modis.igbp.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/soil_type.statsgo.0.03.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.modis.igbp.conus.0.01.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/soil_type.statsgo.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.03.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/soil_type.statsgo.conus.0.01.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.05.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/substrate_temperature.1.0.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.1.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/substrate_temperature.2.6x1.5.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.viirs.igbp.conus.0.01.nc
- wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_greenness.0.144.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/facsf.1.0.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.igbp.0.03.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/leaf_area_index.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.igbp.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/maximum_snow_albedo.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.igbp.conus.0.01.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/slope_type.1.0.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.modis.igbp.0.03.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/snowfree_albedo.4comp.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.modis.igbp.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/soil_type.statsgo.0.03.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.modis.igbp.conus.0.01.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/soil_type.statsgo.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.03.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/soil_type.statsgo.conus.0.01.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.05.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/substrate_temperature.1.0.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.1.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/substrate_temperature.2.6x1.5.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.viirs.igbp.conus.0.01.nc
+ wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_greenness.0.144.nc
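The per-file ``wget`` commands above can also be scripted. The loop below is a minimal sketch for batch-retrieving a handful of the ``fix_am`` files; the ``FILES`` selection and the flat target directory are illustrative choices, not part of the release itself:

.. code-block:: console

   # Base URL for the develop-20240618 fix_am files referenced above.
   BASE=https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am
   # Any other file names from the lists above can be added here.
   FILES="global_soiltype.1x1.grb global_vegtype.1x1.grb global_zorclim.1x1.grb"
   for f in ${FILES}; do
     wget "${BASE}/${f}"
   done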
From 7e8213f7aea91f3948a5f98bddcd0b6103a248a2 Mon Sep 17 00:00:00 2001
From: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com>
Date: Mon, 12 Aug 2024 09:29:41 -0400
Subject: [PATCH 39/42] [develop] Update ufs-weather-model hash and further
 clean the machines tested in PULL_REQUEST_TEMPLATE (#1096)

* Update ufs-weather-model hash to b5a1976 (July 30)

* Add hera.gnu, remove cheyenne.intel, cheyenne.gnu, and gaeac5.intel, and alphabetize the machines in the TESTS CONDUCTED section of the PULL_REQUEST_TEMPLATE

* Correct behavior of Jenkins Functional WorkflowTaskTests. Currently, TASK_DEPTH is set to null, resulting in no tests being run during the Functional WorkflowTaskTests stage. Replaced env with params in Jenkinsfile for setting TASK_DEPTH. Testing shows that this will correctly set TASK_DEPTH to the default value of 9 and allow the tests to run

* Removed extraneous entries from the verification scripts to remove KeyError messages in the associated verification log files

* Reapplied necessary modification to modulefiles/tasks/noaacloud/plot_allvars.local.lua to allow plotting tasks to run on NOAA cloud platforms
---
 .cicd/Jenkinsfile                                      |  4 ++--
 .github/PULL_REQUEST_TEMPLATE                          | 10 ++++------
 Externals.cfg                                          |  2 +-
 doc/ContribGuide/contributing.rst                      | 10 ++++------
 modulefiles/tasks/noaacloud/plot_allvars.local.lua     |  7 ++-----
 .../exregional_run_met_genensprod_or_ensemblestat.sh   |  2 +-
 scripts/exregional_run_met_gridstat_or_pointstat_vx.sh |  2 +-
 ...egional_run_met_gridstat_or_pointstat_vx_ensmean.sh |  2 +-
 ...egional_run_met_gridstat_or_pointstat_vx_ensprob.sh |  2 +-
 scripts/exregional_run_met_pb2nc_obs.sh                |  3 +--
 scripts/exregional_run_met_pcpcombine.sh               |  2 +-
 11 files changed, 19 insertions(+), 27 deletions(-)

diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index 030661bd27..e6ed9515f2 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -193,9 +193,9 @@ pipeline {
 // Try a few Workflow Task scripts to make sure E2E tests can be launched in a follow-on 'Test' stage
 stage('Functional WorkflowTaskTests') {
 environment {
-    TASK_DEPTH = "${env.SRW_WRAPPER_TASK_DEPTH}"
+    TASK_DEPTH = "${params.SRW_WRAPPER_TASK_DEPTH}"
 }
-
+
 steps {
 dir ("${env.SRW_PLATFORM}") {
 echo "Running ${TASK_DEPTH} simple workflow script task tests on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
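The ``env`` → ``params`` switch above assumes that ``SRW_WRAPPER_TASK_DEPTH`` is declared in the pipeline's ``parameters`` block; a minimal sketch of such a declaration is shown below (the default of 9 comes from the commit message above, and the description string is illustrative, not taken from the repository):

.. code-block:: groovy

   pipeline {
     parameters {
       // params.SRW_WRAPPER_TASK_DEPTH resolves to '9' unless the job is
       // started with an explicit value, whereas env.SRW_WRAPPER_TASK_DEPTH
       // is null when no such environment variable has been exported.
       string(name: 'SRW_WRAPPER_TASK_DEPTH', defaultValue: '9',
              description: 'How many workflow task scripts to test')
     }
   }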
diff --git a/.github/PULL_REQUEST_TEMPLATE b/.github/PULL_REQUEST_TEMPLATE
index 1c363c651f..29a878d4a4 100644
--- a/.github/PULL_REQUEST_TEMPLATE
+++ b/.github/PULL_REQUEST_TEMPLATE
@@ -30,15 +30,13 @@
-- [ ] hera.intel
-- [ ] orion.intel
-- [ ] hercules.intel
-- [ ] cheyenne.intel
-- [ ] cheyenne.gnu
 - [ ] derecho.intel
 - [ ] gaea.intel
-- [ ] gaeac5.intel
+- [ ] hera.gnu
+- [ ] hera.intel
+- [ ] hercules.intel
 - [ ] jet.intel
+- [ ] orion.intel
 - [ ] wcoss2.intel
 - [ ] NOAA Cloud (indicate which platform)
 - [ ] Jenkins

diff --git a/Externals.cfg b/Externals.cfg
index 25ec5f79b9..4545cd8ca5 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -12,7 +12,7 @@
 protocol = git
 repo_url = https://github.com/ufs-community/ufs-weather-model
 # Specify either a branch name or a hash but not both.
 #branch = develop
-hash = 1c6b4d4
+hash = b5a1976
 local_path = sorc/ufs-weather-model
 required = True

diff --git a/doc/ContribGuide/contributing.rst b/doc/ContribGuide/contributing.rst
index ed1671363e..eb995efb41 100644
--- a/doc/ContribGuide/contributing.rst
+++ b/doc/ContribGuide/contributing.rst
@@ -227,15 +227,13 @@ Here is the template that is provided when developers click "Create pull request
- - [ ] hera.intel
- - [ ] orion.intel
- - [ ] hercules.intel
- - [ ] cheyenne.intel
- - [ ] cheyenne.gnu
  - [ ] derecho.intel
  - [ ] gaea.intel
- - [ ] gaeac5.intel
+ - [ ] hera.gnu
+ - [ ] hera.intel
+ - [ ] hercules.intel
  - [ ] jet.intel
+ - [ ] orion.intel
  - [ ] wcoss2.intel
  - [ ] NOAA Cloud (indicate which platform)
  - [ ] Jenkins

diff --git a/modulefiles/tasks/noaacloud/plot_allvars.local.lua b/modulefiles/tasks/noaacloud/plot_allvars.local.lua
index 2fd9b41eb5..85291013c7 100644
--- a/modulefiles/tasks/noaacloud/plot_allvars.local.lua
+++ b/modulefiles/tasks/noaacloud/plot_allvars.local.lua
@@ -1,5 +1,2 @@
-unload("python")
-append_path("MODULEPATH","/contrib/EPIC/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_GRAPHICS_ENV", "regional_workflow")
+load("conda")
+setenv("SRW_GRAPHICS_ENV", "srw_graphics")

diff --git a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
index 05503bb963..1c09dc09c6 100755
--- a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
+++ b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh
@@ -10,7 +10,7 @@
 . $USHdir/source_util_funcs.sh
 for sect in user nco platform workflow nco global verification cpl_aqm_parm \
   constants fixed_files grid_params \
-  task_run_post task_run_vx_ensgrid ; do
+  task_run_post ; do
   source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
 done
 #

diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
index 03c6093943..abe5e3dd31 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh
@@ -10,7 +10,7 @@
 . $USHdir/source_util_funcs.sh
 for sect in user nco platform workflow nco global verification cpl_aqm_parm \
   constants fixed_files grid_params \
-  task_run_post task_run_vx_gridstat task_run_vx_pointstat ; do
+  task_run_post ; do
   source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
 done
 #

diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
index 12a54dc21b..2c8378c128 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh
@@ -10,7 +10,7 @@
 . $USHdir/source_util_funcs.sh
 for sect in user nco platform workflow nco global verification cpl_aqm_parm \
   constants fixed_files grid_params \
-  task_run_post task_run_vx_ensgrid_mean task_run_vx_enspoint_mean ; do
+  task_run_post ; do
   source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
 done
 #

diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
index 8fd4a59dfe..eae1850ad8 100755
--- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
+++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh
@@ -10,7 +10,7 @@
 . $USHdir/source_util_funcs.sh
 for sect in user nco platform workflow nco global verification cpl_aqm_parm \
   constants fixed_files grid_params \
-  task_run_vx_ensgrid_prob task_run_vx_enspoint_prob task_run_post ; do
+  task_run_post ; do
   source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
 done
 #

diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh
index 5281021f01..7e79fb4efb 100755
--- a/scripts/exregional_run_met_pb2nc_obs.sh
+++ b/scripts/exregional_run_met_pb2nc_obs.sh
@@ -9,8 +9,7 @@
 #
 . $USHdir/source_util_funcs.sh
 for sect in user nco platform workflow nco global verification cpl_aqm_parm \
-  constants fixed_files grid_params \
-  task_run_met_pb2nc_obs ; do
+  constants fixed_files grid_params ; do
   source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
 done
 #

diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh
index ce9e78ab17..026afb4eb2 100755
--- a/scripts/exregional_run_met_pcpcombine.sh
+++ b/scripts/exregional_run_met_pcpcombine.sh
@@ -10,7 +10,7 @@
 . $USHdir/source_util_funcs.sh
 for sect in user nco platform workflow nco global verification cpl_aqm_parm \
   constants fixed_files grid_params \
-  task_run_met_pcpcombine task_run_post ; do
+  task_run_post ; do
   source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
 done
 #
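The verification-script edits above simply drop section names that no longer exist in the file pointed to by ``GLOBAL_VAR_DEFNS_FP``. A defensive variant of the same loop, sketched below under the assumption that ``source_yaml`` keeps the two-argument interface shown in these hunks and that sections appear as top-level YAML keys, would skip absent sections rather than emit KeyError messages:

.. code-block:: bash

   for sect in user nco platform workflow verification task_run_post ; do
     # Source a section only if it exists as a top-level key in the YAML file.
     if grep -q "^${sect}:" "${GLOBAL_VAR_DEFNS_FP}"; then
       source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
     fi
   done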
...S_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml | 6 +- 7 files changed, 167 insertions(+), 137 deletions(-) diff --git a/devclean.sh b/devclean.sh index 01ace7a7d9..6cd9bed11f 100755 --- a/devclean.sh +++ b/devclean.sh @@ -4,33 +4,31 @@ usage () { cat << EOF_USAGE -Clean the UFS-SRW Application build +Clean the UFS-SRW Application build. + +NOTE: If user included custom directories at build time, those directories must be deleted manually + Usage: $0 [OPTIONS] ... OPTIONS -h, --help - show this help guide + Show this help guide -a, --all - removes "bin", "build" directories, and other build artifacts - --remove - removes the "build" directory, keeps the "bin", "lib" and other build artifacts intact - --clean - removes "bin", "build" directories, and other build artifacts (same as "-a", "--all") - --conda - removes "conda" directory and conda_loc file in SRW - --install-dir=INSTALL_DIR - installation directory name (\${SRW_DIR} by default) - --build-dir=BUILD_DIR - main build directory, absolute path (\${SRW_DIR}/build/ by default) - --bin-dir=BIN_DIR - binary directory name ("exec" by default); full path is \${INSTALL_DIR}/\${BIN_DIR}) - --conda-dir=CONDA_DIR - directory where conda is installed. caution: if outside the SRW clone, it may have broader use - --sub-modules - remove sub-module directories. They will need to be checked out again by sourcing "\${SRW_DIR}/manage_externals/checkout_externals" before attempting subsequent builds + Remove all build artifacts, conda and submodules (equivalent to \`-b -c -s\`) + -b, --build + Remove build directories and artifacts: build/ exec/ share/ include/ lib/ lib64/ + -c, --conda + Remove "conda" directory and conda_loc file in SRW main directory + --container + For cleaning builds within the SRW containers, will remove the "container-bin" + directory rather than "exec". Has no effect if \`-b\` is not specified. + -f, --force + Remove directories as requested, without asking for user confirmation of their deletion. + -s, -sub-modules + Remove sub-module directories. 
They need to be checked out again by sourcing "\${SRW_DIR}/manage_externals/checkout_externals" before attempting subsequent builds -v, --verbose - provide more verbose output - + Provide more verbose output + EOF_USAGE } @@ -39,17 +37,10 @@ settings () { cat << EOF_SETTINGS Settings: - INSTALL_DIR=${INSTALL_DIR} - BUILD_DIR=${BUILD_DIR} - BIN_DIR=${BIN_DIR} - CONDA_DIR=${CONDA_DIR} - REMOVE=${REMOVE} + FORCE=${REMOVE} VERBOSE=${VERBOSE} - -Default cleaning options: (if no arguments provided, then nothing is cleaned) - REMOVE=${REMOVE} - CLEAN=${CLEAN} - INCLUDE_SUB_MODULES=${INCLUDE_SUB_MODULES} + REMOVE_SUB_MODULES=${REMOVE_SUB_MODULES} + REMOVE_CONDA=${REMOVE_CONDA} EOF_SETTINGS } @@ -63,46 +54,28 @@ usage_error () { # default settings SRW_DIR=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}" )" )" && pwd -P) -INSTALL_DIR=${INSTALL_DIR:-${SRW_DIR}} -BUILD_DIR=${BUILD_DIR:-"${SRW_DIR}/build"} -BIN_DIR="exec" -CONDA_DIR=${CONDA_DIR:-"${SRW_DIR}/conda"} -REMOVE=false VERBOSE=false # default clean options REMOVE=false -CLEAN=false -INCLUDE_SUB_MODULES=false #changes to true if '--sub-modules' option is provided +REMOVE_BUILD=false +REMOVE_CONDA=false +REMOVE_SUB_MODULES=false +CONTAINER=false -# process requires arguments -if [[ ("$1" == "--help") || ("$1" == "-h") ]]; then - usage - exit 0 -fi - -# process optional arguments +# process arguments while :; do case $1 in --help|-h) usage; exit 0 ;; - --all|-a) ALL_CLEAN=true ;; - --remove) REMOVE=true ;; - --remove=?*|--remove=) usage_error "$1 argument ignored." ;; - --clean) CLEAN=true ;; - --conda) REMOVE_CONDA=true ;; - --install-dir=?*) INSTALL_DIR=${1#*=} ;; - --install-dir|--install-dir=) usage_error "$1 requires argument." ;; - --build-dir=?*) BUILD_DIR=${1#*=} ;; - --build-dir|--build-dir=) usage_error "$1 requires argument." ;; - --bin-dir=?*) BIN_DIR=${1#*=} ;; - --bin-dir|--bin-dir=) usage_error "$1 requires argument." ;; - --conda-dir=?*) CONDA_DIR=${1#*=} ;; - --conda-dir|--conda-dir=) usage_error "$1 requires argument." ;; - --sub-modules) INCLUDE_SUB_MODULES=true ;; + --all|-a) REMOVE_BUILD=true; REMOVE_CONDA=true; REMOVE_SUB_MODULES=true ;; + --build|-b) REMOVE_BUILD=true ;; + --conda|-c) REMOVE_CONDA=true ;; + --container) CONTAINER=true ;; + --force) REMOVE=true ;; + --force=?*|--force=) usage_error "$1 argument ignored." ;; + --sub-modules|-s) REMOVE_SUB_MODULES=true ;; + --sub-modules=?*|--sub-modules=) usage_error "$1 argument ignored." ;; --verbose|-v) VERBOSE=true ;; - --verbose=?*|--verbose=) usage_error "$1 argument ignored." 
;; - # targets - default) ALL_CLEAN=false ;; # unknown -?*|?*) usage_error "Unknown option $1" ;; *) break ;; @@ -110,66 +83,94 @@ while :; do shift done -# choose defaults to clean -if [ "${ALL_CLEAN}" = true ]; then - CLEAN=true -fi # print settings if [ "${VERBOSE}" = true ] ; then settings fi -# clean if build directory already exists -if [ "${REMOVE}" = true ] && [ "${CLEAN}" = false ] ; then - printf '%s\n' "Remove the \"build\" directory only, BUILD_DIR = $BUILD_DIR " - [[ -d ${BUILD_DIR} ]] && rm -rf ${BUILD_DIR} && printf '%s\n' "rm -rf ${BUILD_DIR}" -elif [ "${CLEAN}" = true ]; then - printf '%s\n' "Remove build directory, bin directory, and other build artifacts " - printf '%s\n' " from the installation directory = ${INSTALL_DIR} " - - directories=( \ - "${BUILD_DIR}" \ - "${INSTALL_DIR}/${BIN_DIR}" \ - "${INSTALL_DIR}/share" \ - "${INSTALL_DIR}/include" \ - "${INSTALL_DIR}/lib" \ - "${INSTALL_DIR}/lib64" \ +# Populate "removal_list" as an array of files/directories to remove, based on user selections +declare -a removal_list='()' + +# Clean standard build artifacts +if [ ${REMOVE_BUILD} == true ]; then + removal_list=( \ + "${SRW_DIR}/build" \ + "${SRW_DIR}/share" \ + "${SRW_DIR}/include" \ + "${SRW_DIR}/lib" \ + "${SRW_DIR}/lib64" \ ) - if [ ${#directories[@]} -ge 1 ]; then - for dir in ${directories[@]}; do - [[ -d "${dir}" ]] && rm -rfv ${dir} - done - echo " " + if [ ${CONTAINER} == true ]; then + removal_list+=("${SRW_DIR}/container-bin") + else + removal_list+=("${SRW_DIR}/exec") fi fi -# Clean all the submodules if requested. Note: Need to check out them again before attempting subsequent builds, by sourcing ${SRW_DIR}/manage_externals/checkout_externals -if [ ${INCLUDE_SUB_MODULES} == true ]; then - printf '%s\n' "Removing submodules ..." + +# Clean all the submodules if requested. +if [ ${REMOVE_SUB_MODULES} == true ]; then declare -a submodules='()' - submodules=(${SRW_DIR}/sorc/*) -# echo " submodules are: ${submodules[@]} (total of ${#submodules[@]}) " - if [ ${#submodules[@]} -ge 1 ]; then - for sub in ${submodules[@]}; do [[ -d "${sub}" ]] && ( rm -rf ${sub} && printf '%s\n' "rm -rf ${sub}" ); done + submodules=(./sorc/*) + # Only add directories to make sure we don't delete CMakeLists.txt + for sub in ${submodules[@]}; do [[ -d "${sub}" ]] && removal_list+=( "${sub}" ); done + if [ "${VERBOSE}" = true ] ; then + printf '%s\n' "Note: Need to check out submodules again for any subsequent builds, " \ + " by running ${SRW_DIR}/manage_externals/checkout_externals " fi - printf '%s\n' "Note: Need to check out submodules again for any subsequent builds, " \ - " by sourcing ${SRW_DIR}/manage_externals/checkout_externals " fi -# # Clean conda if requested if [ "${REMOVE_CONDA}" = true ] ; then - printf '%s\n' "Removing conda location file" - rm -rf ${SRW_DIR}/conda_loc - printf '%s\n' "Removing conda installation" - rm -rf ${CONDA_DIR} + # Read the "conda_loc" file only to confirm that conda is installed in the default location; if the user has changed it + # to a different location, they likely do not want to remove it! + conda_location=$(<${SRW_DIR}/conda_loc) + if [ "${VERBOSE}" = true ] ; then + echo "conda_location=$conda_location" + fi + if [ "${conda_location}" == "${SRW_DIR}/conda" ]; then + removal_list+=("${SRW_DIR}/conda_loc") + removal_list+=("${SRW_DIR}/conda") + else + echo "WARNING: location of conda build in ${SRW_DIR}/conda_loc is not the default location!" + echo "Will not attempt to remove conda!"
+ fi fi +# If array is empty, that means the user has not selected any removal options +if [ ${#removal_list[@]} -eq 0 ]; then + usage_error "No removal options specified" +fi +while [ ${REMOVE} == false ]; do + # Make user confirm deletion of directories unless '--force' option was provided + printf "The following files/directories will be deleted:\n\n" + for i in "${removal_list[@]}"; do + echo "$i" + done + echo "" + read -p "Confirm that you want to delete these files/directories! (Yes/No): " choice + case ${choice} in + [Yy]* ) REMOVE=true ;; + [Nn]* ) echo "User chose not to delete, exiting..."; exit ;; + * ) printf "Invalid option selected.\n" ;; + esac +done + +if [ ${REMOVE} == true ]; then + for dir in ${removal_list[@]}; do + echo "Removing ${dir}" + if [ "${VERBOSE}" = true ] ; then + rm -rfv ${dir} + else + rm -rf ${dir} + fi + done + echo " " + echo "All the requested cleaning tasks have been completed" + echo " " +fi -echo " " -echo "All the requested cleaning tasks have been completed" -echo " " exit 0 diff --git a/doc/UsersGuide/Reference/FAQ.rst b/doc/UsersGuide/Reference/FAQ.rst index 21bef328a3..e8c3df0dec 100644 --- a/doc/UsersGuide/Reference/FAQ.rst +++ b/doc/UsersGuide/Reference/FAQ.rst @@ -20,34 +20,48 @@ Building the SRW App How can I clean up the SRW App code if something went wrong during the build? =============================================================================== -The ``ufs-srweather-app`` repository contains a ``devclean.sh`` convenience script. This script can be used to clean up code if something goes wrong when checking out externals or building the application. To view usage instructions and to get help, run with the ``-h`` flag: +The ``ufs-srweather-app`` repository contains a ``devclean.sh`` convenience script. This script can be used to clean up code if something goes wrong when checking out externals or building the application. To view usage instructions and to get help, run with the ``-h`` or ``--help`` flag: .. code-block:: console ./devclean.sh -h -To remove the ``build`` directory, run: +To remove all build artifacts and directories except the conda installation, use the ``-b`` or ``--build`` flag: .. code-block:: console - ./devclean.sh --remove + ./devclean.sh --build -To remove all build artifacts (including ``build``, ``exec``, ``lib``, and ``share``), run: +When running the SRW App in a container, add the ``--container`` option so that the ``container-bin`` directory is removed instead of ``exec``: .. code-block:: console - ./devclean.sh --clean + ./devclean.sh -b --container + +To remove only the ``conda`` directory and ``conda_loc`` file in the main SRW directory, run with the ``-c`` or ``--conda`` flag: + +.. code-block:: console + + ./devclean.sh --conda OR - ./devclean.sh -a + ./devclean.sh -c -To remove external submodules, run: +To remove external submodules, run with the ``-s`` or ``--sub-modules`` flag: .. code-block:: console ./devclean.sh --sub-modules +To remove all build artifacts, conda, and submodules (equivalent to ``-b -c -s``), run with the ``-a`` or ``--all`` flag: + +.. code-block:: console + + ./devclean.sh --all + + Users will need to check out the external submodules again before building the application. + In addition to the options above, many standard terminal commands can be run to remove unwanted files and directories (e.g., ``rm -rf expt_dirs``). A complete explanation of these options is beyond the scope of this User's Guide.
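For non-interactive use (e.g., in a CI job), the options above can be combined with ``--force`` to skip the confirmation prompt; one plausible invocation that removes build artifacts and submodules is:

.. code-block:: console

   ./devclean.sh -b -s --force
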
=========================== diff --git a/manage_externals/checkout_externals b/manage_externals/checkout_externals index a0698baef0..48bce24010 100755 --- a/manage_externals/checkout_externals +++ b/manage_externals/checkout_externals @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """Main driver wrapper around the manic/checkout utility. diff --git a/parm/wflow/plot.yaml b/parm/wflow/plot.yaml index 445d238c15..0c98e51711 100644 --- a/parm/wflow/plot.yaml +++ b/parm/wflow/plot.yaml @@ -12,10 +12,12 @@ default_task_plot: &default_task PDY: !cycstr "@Y@m@d" cyc: !cycstr "@H" subcyc: !cycstr "@M" + fhr: '#fhr#' LOGDIR: !cycstr "&LOGDIR;" SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' ENSMEM_INDX: '#mem#' - nprocs: '{{ nnodes * ppn }}' + nprocs: '{{ parent.nnodes * parent.ppn }}' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' native: '{{ platform.SCHED_NATIVE_CMD }}' nnodes: 1 nodes: '{{ nnodes }}:ppn={{ ppn }}' @@ -24,25 +26,30 @@ default_task_plot: &default_task queue: '&QUEUE_DEFAULT;' walltime: 01:00:00 -task_plot_allvars: - <<: *default_task - command: '&LOAD_MODULES_RUN_TASK; "plot_allvars" "&JOBSdir;/JREGIONAL_PLOT_ALLVARS"' - join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' - dependency: - or_do_post: &post_files_exist - and_run_post: # If post was meant to run, wait on the whole post metatask - taskvalid: - attrs: - task: run_post_mem000_f000 - metataskdep: - attrs: - metatask: run_ens_post - and_inline_post: # If inline post ran, wait on the forecast task to complete - not: - taskvalid: - attrs: - task: run_post_mem000_f000 - taskdep: - attrs: - task: run_fcst_mem000 - +metatask_plot_allvars: + var: + mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' + metatask_plot_allvars_mem#mem#_all_fhrs: + var: + fhr: '{% for h in range(0, workflow.LONG_FCST_LEN+1) %}{{ " %03d" % h }}{% endfor %}' + cycledef: '{% for h in range(0, workflow.LONG_FCST_LEN+1) %}{% if h <= workflow.FCST_LEN_CYCL|min %}forecast {% else %}long_forecast {% endif %}{% endfor %}' + task_plot_allvars_mem#mem#_f#fhr#: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK; "plot_allvars" "&JOBSdir;/JREGIONAL_PLOT_ALLVARS"' + dependency: + or_do_post: &post_files_exist + and_run_post: # If post was meant to run, wait on the whole post metatask + taskvalid: + attrs: + task: run_post_mem#mem#_f#fhr# + metataskdep: + attrs: + metatask: run_ens_post + and_inline_post: # If inline post ran, wait on the forecast task to complete + not: + taskvalid: + attrs: + task: run_post_mem#mem#_f#fhr# + taskdep: + attrs: + task: run_post_mem#mem#_f#fhr# \ No newline at end of file diff --git a/scripts/exregional_plot_allvars.py b/scripts/exregional_plot_allvars.py index 27eff0f4b0..040e17b012 100755 --- a/scripts/exregional_plot_allvars.py +++ b/scripts/exregional_plot_allvars.py @@ -577,6 +577,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) coastline = cfeature.NaturalEarthFeature( "physical", @@ -586,6 +587,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) states = cfeature.NaturalEarthFeature( "cultural", @@ -596,6 +598,7 @@ def plot_all(dom): linewidth=fline_wd, linestyle=":", alpha=falpha, + zorder=4, ) borders = cfeature.NaturalEarthFeature( "cultural", @@ -605,6 +608,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) # All lat lons are earth relative, so setup the associated 
projection correct for that data diff --git a/scripts/exregional_plot_allvars_diff.py b/scripts/exregional_plot_allvars_diff.py index e51a3a6b57..61efcdb82b 100755 --- a/scripts/exregional_plot_allvars_diff.py +++ b/scripts/exregional_plot_allvars_diff.py @@ -652,6 +652,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) coastline = cfeature.NaturalEarthFeature( "physical", @@ -661,6 +662,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) states = cfeature.NaturalEarthFeature( "cultural", @@ -671,6 +673,7 @@ def plot_all(dom): linewidth=fline_wd, linestyle=":", alpha=falpha, + zorder=4, ) borders = cfeature.NaturalEarthFeature( "cultural", @@ -680,6 +683,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) # All lat lons are earth relative, so setup the associated projection correct for that data diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml index 933042c82f..8e93259539 100644 --- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml @@ -3,8 +3,8 @@ metadata: This test is to ensure that the workflow running in community mode completes successfully on the RRFS_CONUS_25km grid using the GFS_v16 physics suite with ICs and LBCs derived from the NAM. - This test also runs with two ensemble members, and ensures the MET - ensemble-specific tasks run successfully. + This test also runs with two ensemble members, runs plotting tasks for each + ensemble member, and ensures the MET ensemble-specific tasks run successfully. user: RUN_ENVIR: community workflow: @@ -16,7 +16,7 @@ workflow: PREEXISTING_DIR_METHOD: rename rocoto: tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml", "parm/wflow/test.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml", "parm/wflow/test.yaml"]|include }}' metatask_run_ensemble: task_run_fcst_mem#mem#: walltime: 01:00:00 From d9c5fec963243903f154c0e6225ac6acec2927cf Mon Sep 17 00:00:00 2001 From: Anna Kimball <131040494+ankimball@users.noreply.github.com> Date: Wed, 4 Sep 2024 08:10:18 -0500 Subject: [PATCH 41/42] [develop] Fix for SonarQube forked repo renaming failure (#1115) The SonarQube job fails to find a user's repository if they rename it when creating a fork; this change to the Jenkinsfile passes the user's URL to the SonarQube job so that it does not have to form the URL itself. It also passes the change ID (PR number) so that information on the SonarQube job can be archived to S3 and properly aligned with the corresponding PR.
--- .cicd/Jenkinsfile | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile index e6ed9515f2..5b90ab1173 100644 --- a/.cicd/Jenkinsfile +++ b/.cicd/Jenkinsfile @@ -29,9 +29,15 @@ pipeline { stage('Launch SonarQube') { steps { script { + echo "BRANCH_NAME=${env.CHANGE_BRANCH}" + echo "FORK_NAME=${env.CHANGE_FORK}" + echo "CHANGE_URL=${env.CHANGE_URL}" + echo "CHANGE_ID=${env.CHANGE_ID}" build job: '/ufs-srweather-app/ufs-srw-sonarqube', parameters: [ string(name: 'BRANCH_NAME', value: env.CHANGE_BRANCH ?: 'develop'), - string(name: 'FORK_NAME', value: env.CHANGE_FORK ?: '') + string(name: 'FORK_NAME', value: env.CHANGE_FORK ?: ''), + string(name: 'CHANGE_URL', value: env.CHANGE_URL ?: ''), + string(name: 'CHANGE_ID', value: env.CHANGE_ID ?: '') ], wait: false } } From 26cdad8e2045612d4c67d201d6870016e8582afd Mon Sep 17 00:00:00 2001 From: Natalie Perlin <68030316+natalie-perlin@users.noreply.github.com> Date: Thu, 12 Sep 2024 09:01:54 -0400 Subject: [PATCH 42/42] [develop] Added an option for RRFS external model files used as ICS and LBCS (#1089) * An option to use RRFS model output (control) files as initial and lateral boundary conditions (ICS and LBCS) has been added. RRFS_a data for the test were retrieved from the NODD website (https://registry.opendata.aws/noaa-rrfs/): pressure-level GRIB2 files from the control directory, containing RRFS forecasts interpolated onto a 3-km regular grid. * A new test, grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta, has been added with RRFS input files for the 06/05/2024 event, during which tornadoes were reported in Maryland. --------- Co-authored-by: Natalie Perlin Co-authored-by: Natalie Perlin Co-authored-by: Christina Holt <56881914+christinaholtNOAA@users.noreply.github.com> Co-authored-by: Michael Lueken <63728921+MichaelLueken@users.noreply.github.com> Co-authored-by: Gillian Petro <96886803+gspetro-NOAA@users.noreply.github.com> --- devclean.sh | 2 +- doc/UsersGuide/BackgroundInfo/Components.rst | 2 +- .../BuildingRunningTesting/RunSRW.rst | 1 + .../BuildingRunningTesting/WE2Etests.rst | 1 + .../CustomizingTheWorkflow/ConfigWorkflow.rst | 4 +-- .../InputOutputFiles.rst | 14 +++++---- .../CustomizingTheWorkflow/LAMGrids.rst | 2 +- doc/UsersGuide/Reference/Glossary.rst | 3 +- parm/data_locations.yml | 14 +++++++++ parm/wflow/plot.yaml | 3 +- scripts/exregional_make_ics.sh | 18 +++++++----- scripts/exregional_make_lbcs.sh | 13 ++++++--- tests/WE2E/machine_suites/comprehensive | 1 + .../WE2E/machine_suites/comprehensive.derecho | 1 + .../machine_suites/comprehensive.noaacloud | 1 + tests/WE2E/machine_suites/comprehensive.orion | 1 + tests/WE2E/machine_suites/coverage.orion | 1 + ..._ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta.yaml | 29 +++++++++++++++++++ ush/setup.py | 4 +-- ush/valid_param_vals.yaml | 4 +-- 20 files changed, 92 insertions(+), 27 deletions(-) create mode 100644 tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta.yaml diff --git a/devclean.sh b/devclean.sh index 6cd9bed11f..b26988dd93 100755 --- a/devclean.sh +++ b/devclean.sh @@ -24,7 +24,7 @@ OPTIONS directory rather than "exec". Has no effect if \`-b\` is not specified. -f, --force Remove directories as requested, without asking for user confirmation of their deletion. - -s, -sub-modules + -s, --sub-modules Remove sub-module directories.
They need to be checked out again by sourcing "\${SRW_DIR}/manage_externals/checkout_externals" before attempting subsequent builds -v, --verbose Provide more verbose output diff --git a/doc/UsersGuide/BackgroundInfo/Components.rst b/doc/UsersGuide/BackgroundInfo/Components.rst index 1ba9349d8d..559576725d 100644 --- a/doc/UsersGuide/BackgroundInfo/Components.rst +++ b/doc/UsersGuide/BackgroundInfo/Components.rst @@ -22,7 +22,7 @@ UFS Preprocessing Utilities (UFS_UTILS) The SRW Application includes a number of pre-processing utilities (UFS_UTILS) that initialize and prepare the model. Since the SRW App provides forecast predictions over a limited area (rather than globally), these utilities generate a regional grid (``regional_esg_grid/make_hgrid``) along with :term:`orography` (``orog``) and surface climatology (``sfc_climo_gen``) files on that grid. Grids include a strip, or "halo," of six cells that surround the regional grid and feed in lateral boundary condition data. Since different grid and orography files require different numbers of :term:`halo` cells, additional utilities handle topography filtering and shave the number of halo points (based on downstream workflow component requirements). The pre-processing software :term:`chgres_cube` is used to convert the raw external model data into initial and lateral boundary condition files in :term:`netCDF` format. These are needed as input to the :term:`FV3` limited area model (:term:`LAM`). Additional information about the UFS pre-processing utilities can be found in the :doc:`UFS_UTILS Technical Documentation ` and in the `UFS_UTILS Scientific Documentation `__. -The SRW Application can be initialized from a range of operational initial condition files. It is possible to initialize the model from the Global Forecast System (:term:`GFS`), North American Mesoscale (:term:`NAM`) Forecast System, Rapid Refresh (:term:`RAP`), and High-Resolution Rapid Refresh (:term:`HRRR`) files in Gridded Binary v2 (:term:`GRIB2`) format. GFS files also come in :term:`NEMSIO` format for past dates. +The SRW Application can be initialized from a range of operational initial condition files. It is possible to initialize the model from the Global Forecast System (:term:`GFS`), North American Mesoscale (:term:`NAM`) Forecast System, Rapid Refresh (:term:`RAP`), High-Resolution Rapid Refresh (:term:`HRRR`), and Rapid Refresh Forecast System (:term:`RRFS`) files in Gridded Binary v2 (:term:`GRIB2`) format. GFS files also come in :term:`NEMSIO` format for past dates. .. WARNING:: For GFS data, dates prior to 1 January 2018 may work but are not guaranteed. Public archives of model data can be accessed through the `NOAA Operational Model Archive and Distribution System `__ (NOMADS). Raw external model data may be pre-staged on disk by the user. diff --git a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst index d7fd7407a8..b9471acd69 100644 --- a/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst +++ b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst @@ -549,6 +549,7 @@ The ``data:`` section of the machine file can point to various data sources that netcdf: /Users/username/DATA/UFS/FV3GFS/netcdf RAP: /Users/username/DATA/UFS/RAP/grib2 HRRR: /Users/username/DATA/UFS/HRRR/grib2 + RRFS: /Users/username/DATA/UFS/RRFS/grib2 This can be helpful when conducting multiple experiments with different types of data. 
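As a sketch of how such staged data is typically wired into an experiment, a user ``config.yaml`` fragment pointing the new RRFS option at pre-staged files might look like this (the paths are illustrative, not shipped defaults):

.. code-block:: yaml

   task_get_extrn_ics:
     EXTRN_MDL_NAME_ICS: RRFS
     USE_USER_STAGED_EXTRN_FILES: true
     EXTRN_MDL_SOURCE_BASEDIR_ICS: /Users/username/DATA/UFS/RRFS/grib2/YYYYMMDDHH
   task_get_extrn_lbcs:
     EXTRN_MDL_NAME_LBCS: RRFS
     LBC_SPEC_INTVL_HRS: 1
     USE_USER_STAGED_EXTRN_FILES: true
     EXTRN_MDL_SOURCE_BASEDIR_LBCS: /Users/username/DATA/UFS/RRFS/grib2/YYYYMMDDHH
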
diff --git a/doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst b/doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst index 4fca53b575..b3a7bf847b 100644 --- a/doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst +++ b/doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst @@ -78,6 +78,7 @@ For convenience, the WE2E tests are currently grouped into the following categor FV3GFS: RAP: HRRR: + RRFS: Some tests are duplicated among the above categories via symbolic links, both for legacy reasons (when tests for different capabilities were consolidated) and for convenience when a user would like to run all tests for a specific category (e.g., verification tests). diff --git a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst index 5161268980..50835a2451 100644 --- a/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst @@ -912,7 +912,7 @@ Basic Task Parameters For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. ``EXTRN_MDL_NAME_ICS``: (Default: "FV3GFS") - The name of the external model that will provide fields from which initial condition (IC) files, surface files, and 0-th hour boundary condition files will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"NAM"`` | ``"UFS-CASE-STUDY"`` + The name of the external model that will provide fields from which initial condition (IC) files, surface files, and 0-th hour boundary condition files will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"RRFS"`` | ``"NAM"`` | ``"UFS-CASE-STUDY"`` ``EXTRN_MDL_ICS_OFFSET_HRS``: (Default: 0) Users may wish to start a forecast using forecast data from a previous cycle of an external model. This variable indicates how many hours earlier the external model started than the FV3 forecast configured here. For example, if the forecast should start from a 6-hour forecast of the GFS, then ``EXTRN_MDL_ICS_OFFSET_HRS: "6"``. @@ -966,7 +966,7 @@ Basic Task Parameters For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. ``EXTRN_MDL_NAME_LBCS``: (Default: "FV3GFS") - The name of the external model that will provide fields from which lateral boundary condition (LBC) files (except for the 0-th hour LBC file) will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"NAM"`` | ``"UFS-CASE-STUDY"`` + The name of the external model that will provide fields from which lateral boundary condition (LBC) files (except for the 0-th hour LBC file) will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"RRFS"`` | ``"NAM"`` | ``"UFS-CASE-STUDY"`` ``LBC_SPEC_INTVL_HRS``: (Default: 6) The interval (in integer hours) at which LBC files will be generated. This is also referred to as the *boundary update interval*. Note that the model selected in ``EXTRN_MDL_NAME_LBCS`` must have data available at a frequency greater than or equal to that implied by ``LBC_SPEC_INTVL_HRS``. 
For example, if ``LBC_SPEC_INTVL_HRS`` is set to "6", then the model must have data available at least every 6 hours. It is up to the user to ensure that this is the case. diff --git a/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst b/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst index 40227d7a2b..bf24055de4 100644 --- a/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst @@ -20,8 +20,9 @@ The external model files needed for initializing an experiment can be obtained i ways, including: * Pulled from the `SRW App Data Bucket `__, - * Pulled from the NOAA High Performance Storage System (:term:`HPSS`) during the workflow execution (requires user access), or - * Obtained and staged by the user from a different source. + * Pulled from the NOAA High Performance Storage System (:term:`HPSS`) during the workflow execution (requires user access), + * Obtained and staged by the user from a different source, or + * Pulled from the `RRFS data bucket (rrfs_a data) `_. The data format for these files can be :term:`GRIB2` or :term:`NEMSIO`. More information on downloading and setting up the external model data can be found in :numref:`Section %s `. Once the data is set up, the end-to-end application will run the system and write output files to disk. @@ -246,7 +247,7 @@ The environment variables ``FIXgsm``, ``FIXorg``, and ``FIXsfc`` indicate the pa Initial Condition/Lateral Boundary Condition File Formats and Source ----------------------------------------------------------------------- -The SRW Application currently supports raw initial and lateral boundary conditions from numerous models (i.e., FV3GFS, GEFS, GDAS, NAM, RAP, HRRR). The data can be provided in three formats: :term:`NEMSIO`, :term:`netCDF`, or :term:`GRIB2`. +The SRW Application currently supports raw initial and lateral boundary conditions from numerous models (i.e., FV3GFS, GEFS, GDAS, NAM, RAP, HRRR, RRFS). The data can be provided in three formats: :term:`NEMSIO`, :term:`netCDF`, or :term:`GRIB2`. To download the model input data for the 12-hour "out-of-the-box" experiment configuration in ``config.community.yaml`` file, run: @@ -273,7 +274,7 @@ The paths to ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_SOURCE_BASEDIR_LBC USE_USER_STAGED_EXTRN_FILES: true EXTRN_MDL_SOURCE_BASEDIR_LBCS: /path/to/ufs-srweather-app/input_model_data/FV3GFS/grib2/YYYYMMDDHH -The two ``EXTRN_MDL_SOURCE_BASEDIR_*CS`` variables describe where the :term:`IC ` and :term:`LBC ` file directories are located, respectively. For ease of reusing ``config.yaml`` across experiments, it is recommended that users set up the raw :term:`IC/LBC ` file paths to include the model name (e.g., FV3GFS, GEFS, GDAS, NAM, RAP, HRRR), data format (e.g., grib2, nemsio), and date (in ``YYYYMMDDHH`` format). For example: ``/path/to/input_model_data/FV3GFS/grib2/2019061518/``. While there is flexibility to modify these settings, this structure will provide the most reusability for multiple dates when using the SRW Application workflow. +The two ``EXTRN_MDL_SOURCE_BASEDIR_*CS`` variables describe where the :term:`IC ` and :term:`LBC ` file directories are located, respectively. For ease of reusing ``config.yaml`` across experiments, it is recommended that users set up the raw :term:`IC/LBC ` file paths to include the model name (e.g., FV3GFS, GEFS, GDAS, NAM, RAP, HRRR, RRFS), data format (e.g., grib2, nemsio, netcdf), and date (in ``YYYYMMDDHH`` format). 
For example: ``/path/to/input_model_data/FV3GFS/grib2/2019061518/``. While there is flexibility to modify these settings, this structure will provide the most reusability for multiple dates when using the SRW Application workflow. When files are pulled from NOAA :term:`HPSS` (rather than downloaded from the data bucket), the naming convention looks something like this: @@ -290,11 +291,12 @@ When files are pulled from NOAA :term:`HPSS` (rather than downloaded from the da * RAP (GRIB2): ``rap.t{cycle}z.wrfprsf{fhr}.grib2`` * HRRR (GRIB2): ``hrrr.t{cycle}z.wrfprsf{fhr}.grib2`` +* RRFS (GRIB2): ``rrfs.t{cycle}z.prslev.f{fhr}.conus.grib2`` where: * ``{cycle}`` corresponds to the 2-digit hour of the day when the forecast cycle starts, and - * ``{fhr}`` corresponds to the 2- or 3-digit nth hour of the forecast (3-digits for FV3GFS/GDAS data and 2 digits for RAP/HRRR data). + * ``{fhr}`` corresponds to the 2- or 3-digit nth hour of the forecast (3 digits for FV3GFS/GDAS/RRFS data and 2 digits for RAP/HRRR data). For example, a forecast using FV3GFS GRIB2 data that starts at 18h00 UTC would have a ``{cycle}`` value of 18, which is the 000th forecast hour. The LBCS file for 21h00 UTC would be named ``gfs.t18z.pgrb2.0p25.f003``. @@ -353,6 +355,8 @@ AWS S3 Data Buckets: * GDAS: https://registry.opendata.aws/noaa-gfs-bdp-pds/ * HRRR: https://registry.opendata.aws/noaa-hrrr-pds/ (necessary fields for initializing available for dates 2015 and newer) * A list of the NOAA Open Data Dissemination (NODD) datasets can be found here: https://www.noaa.gov/nodd/datasets +* RRFS: experimental data are available starting 02/01/2024 for deterministic forecasts initialized hourly. Forecast data are available out to 60 hours for the 00, 06, 12, and 18 UTC starting times (cycles), and out to 18 hours for other cycles. Earlier dates, from 05/01/2023 to 01/31/2024, may contain only forecasts at 00, 06, 12, and 18 UTC; users need to verify that data exist for the needed dates. + https://noaa-rrfs-pds.s3.amazonaws.com/index.html#rrfs_a/ NCEI Archive: diff --git a/doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst b/doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst index 1fd163e8c6..482caf8590 100644 --- a/doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst @@ -75,7 +75,7 @@ The 3-km CONUS domain is ideal for running the ``FV3_RRFS_v1beta`` physics suite The boundary of the ``RRFS_CONUS_3km`` domain is shown in :numref:`Figure %s ` (in red), and the boundary of the :ref:`write component grid ` sits just inside the computational domain (in blue). This extra grid is required because the post-processing utility (:term:`UPP`) is unable to process data on the native FV3 gnomonic grid (in red). Therefore, model data are interpolated to a Lambert conformal grid (the write component grid) in order for the :term:`UPP` to read in and correctly process the data. .. note:: - While it is possible to initialize the FV3-LAM with coarser external model data when using the ``RRFS_CONUS_3km`` domain, it is generally advised to use external model data (such as HRRR or RAP data) that has a resolution similar to that of the native FV3-LAM (predefined) grid. + While it is possible to initialize the FV3-LAM with coarser external model data when using the ``RRFS_CONUS_3km`` domain, it is generally advised to use external model data (such as HRRR, RRFS, or RAP data) that has a resolution similar to that of the native FV3-LAM (predefined) grid.
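To make the naming convention concrete, the following small Python sketch (not part of the SRW App) expands the RRFS template above, together with the AWS prefix added to ``parm/data_locations.yml`` in this PR, into a full download URL:

.. code-block:: python

   from datetime import datetime

   def rrfs_grib2_url(cycle: datetime, fcst_hr: int) -> str:
       """Build the AWS URL for an RRFS_a control GRIB2 file (3-digit fhr)."""
       base = "https://noaa-rrfs-pds.s3.amazonaws.com/rrfs_a"
       fname = f"rrfs.t{cycle:%H}z.prslev.f{fcst_hr:03d}.conus.grib2"
       return f"{base}/rrfs_a.{cycle:%Y%m%d}/{cycle:%H}/control/{fname}"

   # 2024-06-05 17 UTC cycle, 3-hour forecast (the case used by the new WE2E test)
   print(rrfs_grib2_url(datetime(2024, 6, 5, 17), 3))
   # -> .../rrfs_a/rrfs_a.20240605/17/control/rrfs.t17z.prslev.f003.conus.grib2
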
Predefined SUBCONUS Grid Over Indianapolis diff --git a/doc/UsersGuide/Reference/Glossary.rst b/doc/UsersGuide/Reference/Glossary.rst index 90f9c8ab89..2612d4fbe8 100644 --- a/doc/UsersGuide/Reference/Glossary.rst +++ b/doc/UsersGuide/Reference/Glossary.rst @@ -227,7 +227,8 @@ Glossary A central location in which files (e.g., data, code, documentation) are stored and managed. RRFS - The `Rapid Refresh Forecast System `__ (RRFS) is NOAA's next-generation convection-allowing, rapidly-updated, ensemble-based data assimilation and forecasting system currently scheduled for operational implementation in 2024. It is designed to run forecasts on a 3-km :term:`CONUS` domain. + The `Rapid Refresh Forecast System `__ (RRFS) is NOAA's next-generation convection-allowing, rapidly-updated, ensemble-based data assimilation and forecasting system currently scheduled for operational implementation in 2024. It is designed to run forecasts on a 3-km :term:`CONUS` domain; see also the `NOAA Rapid Refresh Forecast System (RRFS) `__ page. Experimental data are currently available from the `AWS S3 NOAA-RRFS `__ bucket for deterministic forecasts out to 60 hours at 00, 06, 12, and 18 UTC. Additionally, hourly forecasts out to 18 hours may be available for more recent RRFS model runs; users need to verify that data exist for the needed dates. + SDF Suite Definition File. An external file containing information about the construction of a physics suite. It describes the schemes that are called, in which order they are called, whether they are subcycled, and whether they are assembled into groups to be called together. diff --git a/parm/data_locations.yml b/parm/data_locations.yml index 7901f4c085..e65a796739 100644 --- a/parm/data_locations.yml +++ b/parm/data_locations.yml @@ -236,6 +236,20 @@ RAP: file_names: <<: *rap_file_names +RRFS: + hpss: + protocol: htar + file_names: &rrfs_file_names + anl: + - rrfs.t{hh}z.prslev.f{fcst_hr:03d}.conus.grib2 + fcst: + - rrfs.t{hh}z.prslev.f{fcst_hr:03d}.conus.grib2 + aws: + protocol: download + url: https://noaa-rrfs-pds.s3.amazonaws.com/rrfs_a/rrfs_a.{yyyymmdd}/{hh}/control/ + file_names: + <<: *rrfs_file_names + HRRR: hpss: protocol: htar diff --git a/parm/wflow/plot.yaml b/parm/wflow/plot.yaml index 0c98e51711..8448bc3f9e 100644 --- a/parm/wflow/plot.yaml +++ b/parm/wflow/plot.yaml @@ -52,4 +52,5 @@ metatask_plot_allvars: task: run_post_mem#mem#_f#fhr# taskdep: attrs: - task: run_post_mem#mem#_f#fhr# \ No newline at end of file + task: run_post_mem#mem#_f#fhr# + diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index 8cd49076b0..debf526798 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -197,6 +197,7 @@ case "${CCPP_PHYS_SUITE}" in "FV3_HRRR" | \ "FV3_RAP" ) if [ "${EXTRN_MDL_NAME_ICS}" = "RAP" ] || \ + [ "${EXTRN_MDL_NAME_ICS}" = "RRFS" ] || \ [ "${EXTRN_MDL_NAME_ICS}" = "HRRR" ]; then varmap_file="GSDphys_var_map.txt" elif [ "${EXTRN_MDL_NAME_ICS}" = "NAM" ] || \ @@ -245,7 +246,7 @@ esac # # fn_grib2: # Name (not including path) of the grib2 file generated by the external -# model. Currently used for NAM, RAP, and HRRR external model data. +# model. Currently used for NAM, RAP, and HRRR/RRFS external model data. # # input_type: # The "type" of input being provided to chgres_cube. This contains a combi- @@ -321,7 +322,7 @@ esac # tracers_input(:), it must also be 3rd in tracers(:). How can this be checked? # # NOTE: Really should use a varmap table for GFS, just like we do for -# RAP/HRRR. +# RAP/HRRR/RRFS.
# # A non-prognostic variable that appears in the field_table for GSD physics # is cld_amt. Why is that in the field_table at all (since it is a non- @@ -354,7 +355,7 @@ convert_nst="" # # If the external model is not one that uses the RUC land surface model # (LSM) -- which currently includes all valid external models except the -# HRRR and the RAP -- then we set the number of soil levels to include +# HRRR/RRFS and the RAP -- then we set the number of soil levels to include # in the output NetCDF file that chgres_cube generates (nsoill_out; this # is a variable in the namelist that chgres_cube reads in) to 4. This # is because FV3 can handle this regardless of the LSM that it is using @@ -365,7 +366,7 @@ convert_nst="" # 4 soil layers to the 9 layers that it uses. # # On the other hand, if the external model is one that uses the RUC LSM -# (currently meaning that it is either the HRRR or the RAP), then what +# (currently meaning that it is either the HRRR/RRFS or the RAP), then what # we set nsoill_out to depends on whether the RUC or the Noah/Noah MP # LSM is used in the SDF. If the SDF uses RUC, then both the external # model and FV3 use RUC (which expects 9 soil levels), so we simply set @@ -379,12 +380,13 @@ convert_nst="" # 9 to 4 levels. # # In summary, we can set nsoill_out to 4 unless the external model is -# the HRRR or RAP AND the forecast model is using the RUC LSM. +# the HRRR/RRFS or RAP AND the forecast model is using the RUC LSM. # #----------------------------------------------------------------------- # nsoill_out="4" if [ "${EXTRN_MDL_NAME_ICS}" = "HRRR" -o \ + "${EXTRN_MDL_NAME_ICS}" = "RRFS" -o \ "${EXTRN_MDL_NAME_ICS}" = "RAP" ] && \ [ $(boolify "${SDF_USES_RUC_LSM}") = "TRUE" ]; then nsoill_out="9" @@ -393,7 +395,7 @@ fi #----------------------------------------------------------------------- # # If the external model for ICs is one that does not provide the aerosol -# fields needed by Thompson microphysics (currently only the HRRR and +# fields needed by Thompson microphysics (currently only the HRRR/RRFS and # RAP provide aerosol data) and if the physics suite uses Thompson # microphysics, set the variable thomp_mp_climo_file in the chgres_cube # namelist to the full path of the file containing aerosol climatology @@ -405,6 +407,7 @@ fi # thomp_mp_climo_file="" if [ "${EXTRN_MDL_NAME_ICS}" != "HRRR" -a \ + "${EXTRN_MDL_NAME_ICS}" != "RRFS" -a \ "${EXTRN_MDL_NAME_ICS}" != "RAP" ] && \ [ $(boolify "${SDF_USES_THOMPSON_MP}") = "TRUE" ]; then thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}" @@ -519,8 +522,9 @@ case "${EXTRN_MDL_NAME_ICS}" in tg3_from_soil=False ;; -"HRRR") +"HRRR"|"RRFS") external_model="HRRR" + fn_grib2="${EXTRN_MDL_FNS[0]}" input_type="grib2" # diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index 35b4da388a..acbe97a56b 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -196,6 +196,7 @@ case "${CCPP_PHYS_SUITE}" in "FV3_HRRR" | \ "FV3_RAP") if [ "${EXTRN_MDL_NAME_LBCS}" = "RAP" ] || \ + [ "${EXTRN_MDL_NAME_LBCS}" = "RRFS" ] || \ [ "${EXTRN_MDL_NAME_LBCS}" = "HRRR" ]; then varmap_file="GSDphys_var_map.txt" elif [ "${EXTRN_MDL_NAME_LBCS}" = "NAM" ] || \ @@ -239,7 +240,7 @@ esac # # fn_grib2: # Name (not including path) of the grib2 file generated by the external -# model. Currently used for NAM, RAP, and HRRR external model data. +# model. Currently used for NAM, RAP, and HRRR/RRFS external model data. # # input_type: # The "type" of input being provided to chgres_cube. 
This contains a combi- @@ -294,7 +295,7 @@ esac # tracers_input(:), it must also be 3rd in tracers(:). How can this be checked? # # NOTE: Really should use a varmap table for GFS, just like we do for -# RAP/HRRR. +# RAP/HRRR/RRFS. # # A non-prognostic variable that appears in the field_table for GSD physics @@ -318,7 +319,7 @@ tracers="\"\"" #----------------------------------------------------------------------- # # If the external model for LBCs is one that does not provide the aerosol -# fields needed by Thompson microphysics (currently only the HRRR and +# fields needed by Thompson microphysics (currently only the HRRR/RRFS and # RAP provide aerosol data) and if the physics suite uses Thompson # microphysics, set the variable thomp_mp_climo_file in the chgres_cube # namelist to the full path of the file containing aerosol climatology @@ -330,6 +331,7 @@ tracers="\"\"" # thomp_mp_climo_file="" if [ "${EXTRN_MDL_NAME_LBCS}" != "HRRR" -a \ + "${EXTRN_MDL_NAME_LBCS}" != "RRFS" -a \ "${EXTRN_MDL_NAME_LBCS}" != "RAP" ] && \ [ $(boolify "${SDF_USES_THOMPSON_MP}") = "TRUE" ]; then thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}" @@ -401,7 +403,7 @@ case "${EXTRN_MDL_NAME_LBCS}" in input_type="grib2" ;; -"HRRR") +"HRRR"|"RRFS") external_model="HRRR" input_type="grib2" ;; @@ -502,6 +504,9 @@ for (( ii=0; ii<${num_fhrs}; ii=ii+bcgrpnum10 )); do "HRRR") fn_grib2="${EXTRN_MDL_FNS[$i]}" ;; + "RRFS") + fn_grib2="${EXTRN_MDL_FNS[$i]}" + ;; "NAM") fn_grib2="${EXTRN_MDL_FNS[$i]}" ;; diff --git a/tests/WE2E/machine_suites/comprehensive b/tests/WE2E/machine_suites/comprehensive index 8c546918a0..8397e5d0c0 100644 --- a/tests/WE2E/machine_suites/comprehensive +++ b/tests/WE2E/machine_suites/comprehensive @@ -57,6 +57,7 @@ grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta +grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta diff --git a/tests/WE2E/machine_suites/comprehensive.derecho b/tests/WE2E/machine_suites/comprehensive.derecho index a28718a10a..5464a053d8 100644 --- a/tests/WE2E/machine_suites/comprehensive.derecho +++ b/tests/WE2E/machine_suites/comprehensive.derecho @@ -48,6 +48,7 @@ grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta +grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta diff --git a/tests/WE2E/machine_suites/comprehensive.noaacloud b/tests/WE2E/machine_suites/comprehensive.noaacloud index 6c01bd70a8..c9bb96ae64 100644 --- a/tests/WE2E/machine_suites/comprehensive.noaacloud +++ b/tests/WE2E/machine_suites/comprehensive.noaacloud @@ -37,6 +37,7 @@ grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta 
+grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta diff --git a/tests/WE2E/machine_suites/comprehensive.orion b/tests/WE2E/machine_suites/comprehensive.orion index ce71fe05db..5930843582 100644 --- a/tests/WE2E/machine_suites/comprehensive.orion +++ b/tests/WE2E/machine_suites/comprehensive.orion @@ -48,6 +48,7 @@ grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta +grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta diff --git a/tests/WE2E/machine_suites/coverage.orion b/tests/WE2E/machine_suites/coverage.orion index c698648b10..5cb4441437 100644 --- a/tests/WE2E/machine_suites/coverage.orion +++ b/tests/WE2E/machine_suites/coverage.orion @@ -5,6 +5,7 @@ grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_RRFS_v1beta grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR +grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0 diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta.yaml new file mode 100644 index 0000000000..908b79dc43 --- /dev/null +++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta.yaml @@ -0,0 +1,29 @@ +metadata: + description: |- + This test is to ensure that the workflow running in community mode + completes successfully on the RRFS_CONUScompact_25km grid using the RRFS_v1beta + physics suite. It uses RRFS forecasts mapped onto a 3-km regular grid (rrfs*.conus.grib2) for + ICs and LBCs. This test uses the old v1 sfc_data, not the v2 fractional grid sfc_data.
+user: + RUN_ENVIR: community +workflow: + CCPP_PHYS_SUITE: FV3_RRFS_v1beta + PREDEF_GRID_NAME: RRFS_CONUScompact_25km + DATE_FIRST_CYCL: '2024060517' + DATE_LAST_CYCL: '2024060517' + FCST_LEN_HRS: 3 + PREEXISTING_DIR_METHOD: rename +task_get_extrn_ics: + EXTRN_MDL_NAME_ICS: RRFS + FV3GFS_FILE_FMT_ICS: grib2 + USE_USER_STAGED_EXTRN_FILES: true +task_get_extrn_lbcs: + EXTRN_MDL_NAME_LBCS: RRFS + LBC_SPEC_INTVL_HRS: 1 + FV3GFS_FILE_FMT_LBCS: grib2 + USE_USER_STAGED_EXTRN_FILES: true +task_plot_allvars: + COMOUT_REF: "" +rocoto: + tasks: + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}' diff --git a/ush/setup.py b/ush/setup.py index 51d5b2a084..335ce229e1 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -1447,8 +1447,8 @@ def dict_find(user_dict, substring): # If the model ICs or BCs are not from RAP or HRRR, they will not contain aerosol # climatology data needed by the Thompson scheme, so we need to provide a separate file - if (get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RAP"] or - get_extrn_lbcs["EXTRN_MDL_NAME_LBCS"] not in ["HRRR", "RAP"]): + if (get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RRFS", "RAP"] or + get_extrn_lbcs["EXTRN_MDL_NAME_LBCS"] not in ["HRRR", "RRFS", "RAP"]): fixed_files["THOMPSON_FIX_FILES"].append(workflow_config["THOMPSON_MP_CLIMO_FN"]) # Add thompson-specific fix files to CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING and diff --git a/ush/valid_param_vals.yaml b/ush/valid_param_vals.yaml index 3530b51ae9..fd21b3e1cf 100644 --- a/ush/valid_param_vals.yaml +++ b/ush/valid_param_vals.yaml @@ -37,8 +37,8 @@ valid_vals_CCPP_PHYS_SUITE: [ "FV3_RAP" ] valid_vals_GFDLgrid_NUM_CELLS: [48, 96, 192, 384, 768, 1152, 3072] -valid_vals_EXTRN_MDL_NAME_ICS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "NAM"] -valid_vals_EXTRN_MDL_NAME_LBCS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "NAM"] +valid_vals_EXTRN_MDL_NAME_ICS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "RRFS", "NAM"] +valid_vals_EXTRN_MDL_NAME_LBCS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "RRFS", "NAM"] valid_vals_USE_USER_STAGED_EXTRN_FILES: [True, False] valid_vals_FV3GFS_FILE_FMT_ICS: ["nemsio", "grib2", "netcdf"] valid_vals_FV3GFS_FILE_FMT_LBCS: ["nemsio", "grib2", "netcdf"]
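Because RRFS_a availability varies by date and cycle, a quick standalone check such as the sketch below (again, not part of the SRW App) can confirm that a needed file exists in the public bucket before an experiment is configured:

.. code-block:: python

   import urllib.error
   import urllib.request

   def remote_file_exists(url: str) -> bool:
       """Return True if a HEAD request for the URL succeeds (2xx status)."""
       req = urllib.request.Request(url, method="HEAD")
       try:
           with urllib.request.urlopen(req, timeout=30) as resp:
               return 200 <= resp.status < 300
       except urllib.error.URLError:
           return False

   url = ("https://noaa-rrfs-pds.s3.amazonaws.com/rrfs_a/rrfs_a.20240605/"
          "17/control/rrfs.t17z.prslev.f003.conus.grib2")
   print(remote_file_exists(url))
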