diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index 86af5dded4..5b90ab1173 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -5,18 +5,23 @@ pipeline {
         disableConcurrentBuilds()
         overrideIndexTriggers(false)
         skipDefaultCheckout(true)
+        timestamps()
+        timeout(time: 12, unit: 'HOURS')
     }
 
     parameters {
         // Allow job runner to filter based on platform
         // Use the line below to enable all PW clusters
-        // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaeac5', 'hera', 'jet', 'orion', 'hercules', 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'], description: 'Specify the platform(s) to use')
+        // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'hera', 'jet', 'orion', 'hercules', 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'], description: 'Specify the platform(s) to use')
         // Use the line below to enable the PW AWS cluster
-        // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaeac5', 'hera', 'jet', 'orion', 'hercules', 'pclusternoaav2use1'], description: 'Specify the platform(s) to use')
-        // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'derecho', 'gaeac5', 'hera', 'jet', 'orion', 'hercules'], description: 'Specify the platform(s) to use')
-        choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'derecho', 'hera', 'jet', 'orion', 'hercules'], description: 'Specify the platform(s) to use')
+        // choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'cheyenne', 'gaea', 'hera', 'jet', 'orion', 'hercules', 'pclusternoaav2use1'], description: 'Specify the platform(s) to use')
+        choice(name: 'SRW_PLATFORM_FILTER', choices: ['all', 'derecho', 'gaea', 'hera', 'jet', 'orion', 'hercules'], description: 'Specify the platform(s) to use')
         // Allow job runner to filter based on compiler
         choice(name: 'SRW_COMPILER_FILTER', choices: ['all', 'gnu', 'intel'], description: 'Specify the compiler(s) to use to build')
+        // Workflow Wrapper test depth {0..9}, 0=none, 1=simple, 9=all [default]
+        choice(name: 'SRW_WRAPPER_TASK_DEPTH', choices: ['9', '1', '0'], description: '0=none, 1=simple, 9=all [default]')
+        // Which WE2E test (or suite) to run
+        choice(name: 'SRW_WE2E_SINGLE_TEST', choices: ['coverage', 'none', 'skill-score', 'grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0'], description: 'Specify the WE2E test to use')
 
         booleanParam name: 'SRW_WE2E_COMPREHENSIVE_TESTS', defaultValue: false, description: 'Whether to execute the comprehensive end-to-end tests'
     }
@@ -24,9 +29,15 @@ pipeline {
         stage('Launch SonarQube') {
             steps {
                 script {
+                    echo "BRANCH_NAME=${env.CHANGE_BRANCH}"
+                    echo "FORK_NAME=${env.CHANGE_FORK}"
+                    echo "CHANGE_URL=${env.CHANGE_URL}"
+                    echo "CHANGE_ID=${env.CHANGE_ID}"
                     build job: '/ufs-srweather-app/ufs-srw-sonarqube', parameters: [
                         string(name: 'BRANCH_NAME', value: env.CHANGE_BRANCH ?: 'develop'),
-                        string(name: 'FORK_NAME', value: env.CHANGE_FORK ?: '')
+                        string(name: 'FORK_NAME', value: env.CHANGE_FORK ?: ''),
+                        string(name: 'CHANGE_URL', value: env.CHANGE_URL ?: ''),
+                        string(name: 'CHANGE_ID', value: env.CHANGE_ID ?: '')
                     ], wait: false
                 }
             }
@@ -71,6 +82,11 @@ pipeline {
             // Run on all platform/compiler combinations by default or build and test only on the platform(s) and
             // compiler(s) specified by SRW_PLATFORM_FILTER and SRW_COMPILER_FILTER
             when {
+                beforeAgent true
+                expression {
+                    return nodesByLabel(env.SRW_PLATFORM).size() > 0
+                }
+
                 allOf {
                     anyOf {
                         expression { params.SRW_PLATFORM_FILTER == 'all' }
@@ -87,8 +103,7 @@ pipeline {
             axes {
                 axis {
                     name 'SRW_PLATFORM'
-                    // values 'derecho', 'gaeac5', 'hera', 'jet', 'orion', 'hercules' //, 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'
-                    values 'derecho', 'hera', 'jet', 'orion', 'hercules' //, 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'
+                    values 'derecho', 'gaea', 'hera', 'jet', 'orion', 'hercules' //, 'pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'
                 }
 
                 axis {
@@ -102,8 +117,7 @@ pipeline {
             exclude {
                 axis {
                     name 'SRW_PLATFORM'
-                    // values 'derecho', 'gaeac5', 'jet', 'orion', 'hercules' //, 'pclusternoaav2use1' , 'azclusternoaav2eus1', 'gclusternoaav2usc1'
-                    values 'derecho', 'jet', 'orion', 'hercules' //, 'pclusternoaav2use1' , 'azclusternoaav2eus1', 'gclusternoaav2usc1'
+                    values 'derecho', 'gaea', 'jet', 'orion', 'hercules' //, 'pclusternoaav2use1' , 'azclusternoaav2eus1', 'gclusternoaav2usc1'
                 }
 
                 axis {
@@ -129,20 +143,33 @@ pipeline {
             stage('Initialize') {
                 steps {
                     dir ("${env.SRW_PLATFORM}") {
-                        echo "Initializing SRW (${env.SRW_COMPILER}) build environment on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
+                        echo "${env.STAGE_NAME} SRW (${env.SRW_COMPILER}) build environment on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
                         cleanWs()
                         checkout scm
-                        sh '"${WORKSPACE}/${SRW_PLATFORM}/manage_externals/checkout_externals"'
+                        sh '"${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_init.sh"'
+                        sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"'
+                    }
+                }
+
+                post {
+                    always {
+                        s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_init.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+                        s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
                     }
                 }
             }
 
             // Run the unified build script; if successful create a tarball of the build and upload to S3
             stage('Build') {
+                options {
+                    timeout(time: 4, unit: 'HOURS')
+                }
+
                 steps {
                     dir ("${env.SRW_PLATFORM}") {
-                        echo "Building SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
+                        echo "${env.STAGE_NAME} SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
                         sh 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_build.sh"'
+                        sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"'
                     }
                 }
@@ -151,6 +178,11 @@ pipeline {
                         sh 'cd "${WORKSPACE}/${SRW_PLATFORM}/${INSTALL_NAME}" && tar --create --gzip --verbose --file "${WORKSPACE}/${SRW_PLATFORM}/${BUILD_NAME}.tgz" *'
                         s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: true, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/${env.BUILD_NAME}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: true, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/build_${env.SRW_COMPILER}/srw_build-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
                     }
+                    always {
+                        s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-env.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+                        s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_build.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+                        s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+                    }
                 }
             }
@@ -166,9 +198,13 @@ pipeline {
 
             // Try a few Workflow Task scripts to make sure E2E tests can be launched in a follow-on 'Test' stage
             stage('Functional WorkflowTaskTests') {
+                environment {
+                    TASK_DEPTH = "${params.SRW_WRAPPER_TASK_DEPTH}"
+                }
+
                 steps {
                     dir ("${env.SRW_PLATFORM}") {
-                        echo "Running simple workflow script task tests on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
+                        echo "Running ${TASK_DEPTH} simple workflow script task tests on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
                         sh 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/wrapper_srw_ftest.sh"'
                     }
                 }
@@ -176,17 +212,22 @@ pipeline {
 
             // Run the unified test script
             stage('Test') {
+                options {
+                    timeout(time: 8, unit: 'HOURS')
+                }
+
                 environment {
                     SRW_WE2E_EXPERIMENT_BASE_DIR = "${env.WORKSPACE}/${env.SRW_PLATFORM}/expt_dirs"
                 }
 
                 steps {
                     dir ("${env.SRW_PLATFORM}") {
-                        echo "Testing SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
+                        echo "${env.STAGE_NAME} SRW (${env.SRW_COMPILER}) on ${env.SRW_PLATFORM} (using ${env.WORKSPACE}/${env.SRW_PLATFORM})"
 
                         // If executing for a Pull Request, check for the run_we2e_comprehensive_tests. If set,
                         // override the value of the SRW_WE2E_COMPREHENSIVE_TESTS parameter
                         script {
+                            def single_test = params.SRW_WE2E_SINGLE_TEST
                             def run_we2e_comprehensive_tests = params.SRW_WE2E_COMPREHENSIVE_TESTS
                             def run_we2e_comprehensive_tests_label = 'run_we2e_comprehensive_tests'
@@ -198,18 +239,25 @@ pipeline {
                             }
                         }
 
-                        sh "SRW_WE2E_COMPREHENSIVE_TESTS=${run_we2e_comprehensive_tests}" + ' bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_test.sh"'
-                    }
+                        sh "SRW_WE2E_COMPREHENSIVE_TESTS=${run_we2e_comprehensive_tests} SRW_WE2E_SINGLE_TEST=${single_test}" + ' bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_test.sh"'
+
+                    }
+                    sh "STAGE_NAME=${env.STAGE_NAME} " + 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"'
                 }
             }
 
             post {
+                success {
+                    s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*-skill-score.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+                }
                 always {
                     // Archive the test log files
-                    sh 'cd "${SRW_WE2E_EXPERIMENT_BASE_DIR}" && tar --create --gzip --verbose --dereference --file "${WORKSPACE}/${SRW_PLATFORM}/we2e_test_logs-${SRW_PLATFORM}-${SRW_COMPILER}.tgz" */log.generate_FV3LAM_wflow */log/* ${WORKSPACE}/${SRW_PLATFORM}/tests/WE2E/WE2E_tests_*yaml WE2E_summary*txt ${WORKSPACE}/${SRW_PLATFORM}/tests/WE2E/log.*'
+                    sh "[[ -d ${SRW_WE2E_EXPERIMENT_BASE_DIR} ]] && cd ${SRW_WE2E_EXPERIMENT_BASE_DIR} && tar --create --gzip --verbose --dereference --file ${env.WORKSPACE}/${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz */log.generate_FV3LAM_wflow */log/* ${env.WORKSPACE}/${env.SRW_PLATFORM}/tests/WE2E/WE2E_tests_*yaml WE2E_summary*txt ${env.WORKSPACE}/${env.SRW_PLATFORM}/tests/WE2E/log.* || cat /dev/null > ${env.WORKSPACE}/${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz"
+                    s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+                    s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-time-srw_test.json", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
+                    s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}-*-disk-usage${env.STAGE_NAME}.csv", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
                     // Remove the data sets from the experiments directory to conserve disk space
                     sh 'find "${SRW_WE2E_EXPERIMENT_BASE_DIR}" -regextype posix-extended -regex "^.*(orog|[0-9]{10})$" -type d | xargs rm -rf'
-                    s3Upload consoleLogLevel: 'INFO', dontSetBuildResultOnFailure: false, dontWaitForConcurrentBuildCompletion: false, entries: [[bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/*_test_results-*-*.txt", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false], [bucket: 'noaa-epic-prod-jenkins-artifacts', excludedFile: '', flatten: false, gzipFiles: false, keepForever: false, managedArtifacts: true, noUploadOnFailure: false, selectedRegion: 'us-east-1', showDirectlyInBrowser: false, sourceFile: "${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: []
                 }
             }
@@ -217,13 +265,23 @@ pipeline {
             }
         }
     }
+    // end of stages{}
 
-    // Uncomment the following block to re-enable PW clusters
-    /*
     post {
         always {
-            // Stop any Parallel Works clusters that were started during the pipeline execution
             script {
+                // Trigger another job to collect all build statistics
+                CI_JOB_NAME=env.JOB_NAME.replace("/${env.JOB_BASE_NAME}","")
+                CI_BRANCH_NAME=env.JOB_BASE_NAME.replace("%2F","%252F")
+                echo "post: Triggering ufs-srweather-app/ufs-srw-metrics job for ${CI_JOB_NAME} on branch build ${CI_BRANCH_NAME}/${env.BUILD_NUMBER} ..."
+                build job: '/ufs-srweather-app/ufs-srw-metrics', parameters: [
+                    string(name: 'CI_JOB_NAME', value: "${CI_JOB_NAME}"),
+                    string(name: 'CI_BUILD_NUMBER', value: "${CI_BRANCH_NAME}/${env.BUILD_NUMBER}")
+                ], wait: false
+
+                // Uncomment the following block to re-enable PW clusters
+                /*
+                // Stop any Parallel Works clusters that were started during the pipeline execution
                 // def pw_clusters = ['pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1']
                 def pw_clusters = ['pclusternoaav2use1']
                 def clusters = []
@@ -242,8 +300,8 @@ pipeline {
                 // PW_CLUSTER_NAME parameter
                 build job: 'parallel-works-jenkins-client/stop-cluster', parameters: [string(name: 'PW_CLUSTER_NAME', value: clusters[i])]
             }
+            */
         }
     }
-    */
 }
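The new Initialize stage reduces to two shell steps per platform/compiler cell. A rough local reproduction (a sketch only; the WORKSPACE, SRW_PLATFORM, and SRW_COMPILER values are illustrative, and the Jenkins login shell is approximated with bash --login):

    # Mimic the Initialize stage by hand from a checked-out workspace
    export WORKSPACE=$PWD SRW_PLATFORM=hera SRW_COMPILER=intel
    bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_init.sh"
    STAGE_NAME=Initialize bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/disk_usage.sh"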
"${env.SRW_PLATFORM}/we2e_test_logs-${env.SRW_PLATFORM}-${env.SRW_COMPILER}.tgz", storageClass: 'STANDARD', uploadFromSlave: false, useServerSideEncryption: false]], pluginFailureResultConstraint: 'FAILURE', profileName: 'main', userMetadata: [] } } } @@ -217,13 +265,23 @@ pipeline { } } } + // end of stages{} - // Uncomment the following block to re-enable PW clusters - /* post { always { - // Stop any Parallel Works clusters that were started during the pipeline execution script { + // Trigger another job to collect all build statistics + CI_JOB_NAME=env.JOB_NAME.replace("/${env.JOB_BASE_NAME}","") + CI_BRANCH_NAME=env.JOB_BASE_NAME.replace("%2F","%252F") + echo "post: Triggering ufs-srweather-app/ufs-srw-metrics job for ${CI_JOB_NAME} on branch build ${CI_BRANCH_NAME}/${env.BUILD_NUMBER} ..." + build job: '/ufs-srweather-app/ufs-srw-metrics', parameters: [ + string(name: 'CI_JOB_NAME', value: "${CI_JOB_NAME}"), + string(name: 'CI_BUILD_NUMBER', value: "${CI_BRANCH_NAME}/${env.BUILD_NUMBER}") + ], wait: false + + // Uncomment the following block to re-enable PW clusters + /* + // Stop any Parallel Works clusters that were started during the pipeline execution // def pw_clusters = ['pclusternoaav2use1', 'azclusternoaav2eus1', 'gclusternoaav2usc1'] def pw_clusters = ['pclusternoaav2use1'] def clusters = [] @@ -242,8 +300,8 @@ pipeline { // PW_CLUSTER_NAME parameter build job: 'parallel-works-jenkins-client/stop-cluster', parameters: [string(name: 'PW_CLUSTER_NAME', value: clusters[i])] } + */ } } } - */ } diff --git a/.cicd/scripts/disk_usage.sh b/.cicd/scripts/disk_usage.sh new file mode 100755 index 0000000000..08a482d70f --- /dev/null +++ b/.cicd/scripts/disk_usage.sh @@ -0,0 +1,48 @@ +#!/usr/bin/env bash + +# Output a CSV report of disk usage on subdirs of some path +# Usage: +# [JOB_NAME=] [BUILD_NUMBER=] [SRW_COMPILER=] [SRW_PLATFORM=] disk_usage path depth size outfile.csv +# +# args: +# directory=$1 +# depth=$2 +# size=$3 +# outfile=$4 + +[[ -n ${WORKSPACE} ]] || WORKSPACE=$(pwd) +[[ -n ${SRW_PLATFORM} ]] || SRW_PLATFORM=$(hostname -s 2>/dev/null) || SRW_PLATFORM=$(hostname 2>/dev/null) +[[ -n ${SRW_COMPILER} ]] || SRW_COMPILER=compiler + +script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)" + +# Get repository root from Jenkins WORKSPACE variable if set, otherwise, set +# relative to script directory. +declare workspace +if [[ -n "${WORKSPACE}/${SRW_PLATFORM}" ]]; then + workspace="${WORKSPACE}/${SRW_PLATFORM}" +else + workspace="$(cd -- "${script_dir}/../.." 
&& pwd)" +fi + +echo "STAGE_NAME=${STAGE_NAME}" # from pipeline +outfile="${4:-${workspace}-${SRW_COMPILER}-disk-usage${STAGE_NAME}.csv}" + +function disk_usage() { + local directory=${1:-${PWD}} + local depth=${2:-1} + local size=${3:-k} + echo "Disk usage: ${JOB_NAME:-ci}/${SRW_PLATFORM}/$(basename $directory)" + ( + cd $directory || exit 1 + echo "Platform,Build,Owner,Group,Inodes,${size:-k}bytes,Access Time,Filename" + du -Px -d ${depth:-1} --inode --exclude='./workspace' | \ + while read line ; do + arr=($line); inode=${arr[0]}; filename=${arr[1]}; + echo "${SRW_PLATFORM}-${SRW_COMPILER:-compiler},${JOB_NAME:-ci}/${BUILD_NUMBER:-0},$(stat -c '%U,%G' $filename),${inode:-0},$(du -Px -s -${size:-k} --time $filename)" | tr '\t' ',' ; + done | sort -t, -k5 -n #-r + ) + echo "" +} + +disk_usage $1 $2 $3 | tee ${outfile} diff --git a/.cicd/scripts/qsub_srw_ftest.sh b/.cicd/scripts/qsub_srw_ftest.sh index e9f0170a05..8b2569ca69 100644 --- a/.cicd/scripts/qsub_srw_ftest.sh +++ b/.cicd/scripts/qsub_srw_ftest.sh @@ -9,7 +9,5 @@ #PBS -l select=1:ncpus=24:mpiprocs=24:ompthreads=1 #PBS -l walltime=00:30:00 #PBS -V -#PBS -o log_wrap.%j.log -#PBS -e err_wrap.%j.err bash ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_ftest.sh diff --git a/.cicd/scripts/sbatch_srw_ftest.sh b/.cicd/scripts/sbatch_srw_ftest.sh index 5add5368b5..07888d5ae9 100644 --- a/.cicd/scripts/sbatch_srw_ftest.sh +++ b/.cicd/scripts/sbatch_srw_ftest.sh @@ -7,7 +7,7 @@ #SBATCH --account=${SRW_PROJECT} #SBATCH --qos=batch #SBATCH --nodes=1 -#SBATCH --tasks-per-node=24 +#SBATCH --tasks-per-node=12 #SBATCH --cpus-per-task=1 #SBATCH -t 00:30:00 #SBATCH -o log_wrap.%j.log diff --git a/.cicd/scripts/srw_build.sh b/.cicd/scripts/srw_build.sh index 196d984a05..25546561eb 100755 --- a/.cicd/scripts/srw_build.sh +++ b/.cicd/scripts/srw_build.sh @@ -27,7 +27,8 @@ fi # Build and install cd ${workspace}/tests set +e -./build.sh ${platform} ${SRW_COMPILER} +/usr/bin/time -p -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' -o ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_build.json \ + ./build.sh ${platform} ${SRW_COMPILER} build_exit=$? set -e cd - @@ -35,6 +36,6 @@ cd - # Create combined log file for upload to s3 build_dir="${workspace}/build_${SRW_COMPILER}" cat ${build_dir}/log.cmake ${build_dir}/log.make \ - >${build_dir}/srw_build-${platform}-${SRW_COMPILER}.txt + >${build_dir}/srw_build-${SRW_PLATFORM}-${SRW_COMPILER}.txt exit $build_exit diff --git a/.cicd/scripts/srw_ftest.sh b/.cicd/scripts/srw_ftest.sh index 95d5e2f936..95530a89aa 100755 --- a/.cicd/scripts/srw_ftest.sh +++ b/.cicd/scripts/srw_ftest.sh @@ -46,7 +46,6 @@ fi # Test directories we2e_experiment_base_dir="${workspace}/expt_dirs" we2e_test_dir="${workspace}/tests/WE2E" -nco_dir="${workspace}/nco_dirs" pwd @@ -67,6 +66,9 @@ sed "s|^workflow:|workflow:\n EXEC_SUBDIR: ${workspace}/install_${SRW_COMPILER} # Decrease forecast length since we are running all the steps sed "s|^ FCST_LEN_HRS: 12| FCST_LEN_HRS: 6|g" -i ush/config.yaml +# Update compiler +sed "s|^ COMPILER: intel| COMPILER: ${SRW_COMPILER}|g" -i ush/config.yaml + # DATA_LOCATION differs on each platform ... find it. 
diff --git a/.cicd/scripts/srw_ftest.sh b/.cicd/scripts/srw_ftest.sh
index 95d5e2f936..95530a89aa 100755
--- a/.cicd/scripts/srw_ftest.sh
+++ b/.cicd/scripts/srw_ftest.sh
@@ -46,7 +46,6 @@ fi
 # Test directories
 we2e_experiment_base_dir="${workspace}/expt_dirs"
 we2e_test_dir="${workspace}/tests/WE2E"
-nco_dir="${workspace}/nco_dirs"
 
 pwd
 
@@ -67,6 +66,9 @@ sed "s|^workflow:|workflow:\n  EXEC_SUBDIR: ${workspace}/install_${SRW_COMPILER}
 # Decrease forecast length since we are running all the steps
 sed "s|^  FCST_LEN_HRS: 12|  FCST_LEN_HRS: 6|g" -i ush/config.yaml
 
+# Update compiler
+sed "s|^  COMPILER: intel|  COMPILER: ${SRW_COMPILER}|g" -i ush/config.yaml
+
 # DATA_LOCATION differs on each platform ... find it.
 export DATA_LOCATION=$(grep TEST_EXTRN_MDL_SOURCE_BASEDIR ${workspace}/ush/machine/${platform,,}.yaml | awk '{printf "%s", $2}')
 echo "DATA_LOCATION=${DATA_LOCATION}"
@@ -78,19 +80,19 @@ sed "s|^task_get_extrn_lbcs:|task_get_extrn_lbcs:\n  EXTRN_MDL_SOURCE_BASEDIR_LB
 # Use staged data for HPSS supported machines
 sed 's|^platform:|platform:\n  EXTRN_MDL_DATA_STORES: disk|g' -i ush/config.yaml
 
+# Set OMP_NUM_THREADS_RUN_FCST to 1 in config.yaml
+sed 's|^task_run_fcst:|task_run_fcst:\n  OMP_NUM_THREADS_RUN_FCST: 1|1' -i ush/config.yaml
+
 # Activate the workflow environment ...
 source etc/lmod-setup.sh ${platform,,}
 module use modulefiles
 module load build_${platform,,}_${SRW_COMPILER}
 module load wflow_${platform,,}
+# Deactivate conflicting conda env on GCP
+[[ "${SRW_PLATFORM}" =~ "gclusternoaa" ]] && conda deactivate
 
 [[ ${FORGIVE_CONDA} == true ]] && set +e +u  # Some platforms have incomplete python3 or conda support, but wouldn't necessarily block workflow tests
-# Gaea-C5 special case missing jinja2
-if [ "${platform}" == "gaea-c5" ]; then
-    conda activate workflow_tools
-else
-    conda activate srw_app
-fi
+conda activate srw_app
 set -e -u
 
 # Adjust for strict limitation of stack size
@@ -111,7 +113,7 @@ cp ${workspace}/ush/wrappers/*.sh .
 export JOBSdir=${workspace}/jobs
 export USHdir=${workspace}/ush
 export OMP_NUM_THREADS=1
-export nprocs=24
+export nprocs=12
 
 [[ -n ${TASKS} ]] || TASKS=(
     run_make_grid
diff --git a/.cicd/scripts/srw_init.sh b/.cicd/scripts/srw_init.sh
new file mode 100755
index 0000000000..688255ac98
--- /dev/null
+++ b/.cicd/scripts/srw_init.sh
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+#
+# A unified init script for the SRW application. This script is expected to
+# fetch initial source for the SRW application for all supported platforms.
+#
+set -e -u -x
+
+script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
+
+# Get repository root from Jenkins WORKSPACE variable if set, otherwise, set
+# relative to script directory.
+declare workspace
+if [[ -d "${WORKSPACE}/${SRW_PLATFORM}" ]]; then
+    workspace="${WORKSPACE}/${SRW_PLATFORM}"
+else
+    workspace="$(cd -- "${script_dir}/../.." && pwd)"
+fi
+
+# Normalize Parallel Works cluster platform value.
+declare platform
+if [[ "${SRW_PLATFORM}" =~ ^(az|g|p)clusternoaa ]]; then
+    platform='noaacloud'
+else
+    platform="${SRW_PLATFORM}"
+fi
+
+# Build and install
+cd ${workspace}
+set +e
+/usr/bin/time -p -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' -o ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_init.json \
+    ./manage_externals/checkout_externals
+init_exit=$?
+echo "STAGE_NAME=${STAGE_NAME}"
+env | grep = | sort > ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-env.txt
+set -e
+cd -
+
+exit $init_exit
+echo "STAGE_NAME=${STAGE_NAME}" +env | grep = | sort > ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-env.txt +set -e +cd - + +exit $init_exit diff --git a/.cicd/scripts/srw_metric.sh b/.cicd/scripts/srw_metric.sh new file mode 100755 index 0000000000..8f6eed85b0 --- /dev/null +++ b/.cicd/scripts/srw_metric.sh @@ -0,0 +1,148 @@ +#!/usr/bin/env bash +# +# The goal of this script is to provide an example of performing Indy-Severe-Weather test run and compare results to reference with +# Skill score index that is calculated by MET Stat-Analysis Tools +# +# Required (these options are set in the Jenkins env): +# WORKSPACE= +# SRW_PLATFORM= +# SRW_COMPILER= +# SRW_PROJECT= +# +# Optional: +#[[ -n ${SRW_PROJECT} ]] || SRW_PROJECT="no_account" +[[ -n ${FORGIVE_CONDA} ]] || FORGIVE_CONDA=true +set -e -u -x + +BUILD_OPT=false +RUN_WE2E_OPT=false +RUN_STAT_ANLY_OPT=false + +if [[ $# -eq 0 ]]; then + BUILD_OPT=true + RUN_WE2E_OPT=true + RUN_STAT_ANLY_OPT=true +elif [[ $# -ge 4 ]]; then + echo "Too many arguments, expecting three or less" + exit 1 +else + for opt in "$@"; do + case $opt in + build) BUILD_OPT=true ;; + run_we2e) RUN_WE2E_OPT=true ;; + run_stat_anly) RUN_STAT_ANLY_OPT=true ;; + *) echo "Not valid option. Exiting!" ; exit 1 ;; + esac + done +fi + +script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)" + +# Get repository root from Jenkins WORKSPACE variable if set, otherwise, set +# relative to script directory. +declare workspace +if [[ -d "${WORKSPACE}/${SRW_PLATFORM}" ]]; then + workspace="${WORKSPACE}/${SRW_PLATFORM}" +else + workspace="$(cd -- "${script_dir}/../.." && pwd)" +fi + +# Normalize Parallel Works cluster platform value. +declare platform +if [[ "${SRW_PLATFORM}" =~ ^(az|g|p)clusternoaa ]]; then + platform='noaacloud' +else + platform="${SRW_PLATFORM}" +fi + +# Test directories +we2e_experiment_base_dir="${we2e_experiment_base_dir:=${workspace}/../expt_dirs/metric_test}" +we2e_test_dir="${we2e_test_dir:=${workspace}/tests/WE2E}" +we2e_test_name="grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0" + +pwd + +# Setup the build environment +declare srw_compiler +srw_compiler=${SRW_COMPILER} +source ${workspace}/etc/lmod-setup.sh ${platform,,} +module use ${workspace}/modulefiles +module load build_${platform,,}_${srw_compiler} + +# Build srw +if [[ ${BUILD_OPT} == true ]]; then + cd ${workspace}/tests + ./build.sh ${platform,,} ${srw_compiler} +fi +cd ${workspace} + +# Activate workflow environment +module load wflow_${platform,,} +# Deactivate conflicting conda env on GCP +[[ "${SRW_PLATFORM}" =~ "gclusternoaa" ]] && conda deactivate + +[[ ${FORGIVE_CONDA} == true ]] && set +e +u # Some platforms have incomplete python3 or conda support, but would not necessarily block workflow tests +conda activate srw_app +set -e -u + +# Run test +declare srw_project +srw_project=${SRW_PROJECT} +if [[ ${RUN_WE2E_OPT} == true ]]; then + [[ -d ${we2e_experiment_base_dir} ]] && rm -rf ${we2e_experiment_base_dir} + cd ${workspace}/tests/WE2E + ./run_WE2E_tests.py -t ${we2e_test_name} -m ${platform,,} -a ${srw_project} --expt_basedir "metric_test" --exec_subdir=install_intel/exec -q +fi +cd ${workspace} + +# Run skill-score check +if [[ ${RUN_STAT_ANLY_OPT} == true ]]; then + # Clear out data + rm -rf ${workspace}/Indy-Severe-Weather/ + # Check if metprd data exists locally otherwise get it from S3 + TEST_EXTRN_MDL_SOURCE_BASEDIR=$(grep TEST_EXTRN_MDL_SOURCE_BASEDIR ${workspace}/ush/machine/${platform}.yaml | awk '{print $NF}') + if [[ -d $(dirname 
${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat ]] ; then + mkdir -p Indy-Severe-Weather/metprd/point_stat + cp -rp $(dirname ${TEST_EXTRN_MDL_SOURCE_BASEDIR})/metprd/point_stat Indy-Severe-Weather/metprd + elif [[ -f Indy-Severe-Weather.tgz ]]; then + tar xvfz Indy-Severe-Weather.tgz + else + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.1.0/METplus-vx-sample/Indy-Severe-Weather.tgz + tar xvfz Indy-Severe-Weather.tgz + fi + [[ -f ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt ]] && rm ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt + # Skill score index is computed over several terms that are defined in parm/metplus/STATAnalysisConfig_skill_score. + # It is computed by aggregating the output from earlier runs of the Point-Stat and/or Grid-Stat tools over one or more cases. + # In this example, skill score index is a weighted average of 4 skill scores of RMSE statistics for wind speed, dew point temperature, + # temperature, and pressure at lowest level in the atmosphere over 6 hour lead time. + cp ${we2e_experiment_base_dir}/${we2e_test_name}/2019061500/metprd/PointStat/*.stat ${workspace}/Indy-Severe-Weather/metprd/point_stat/ + # Remove conda for Orion due to conda env conflicts + if [[ ${platform} =~ "orion" ]]; then + sed -i 's|load("conda")|--load("conda")|g' ${workspace}/modulefiles/tasks/${platform,,}/run_vx.local.lua + fi + # Load met and metplus + module use modulefiles/tasks/${platform,,} + module load run_vx.local + # Reset Orion run_vx.local file + if [[ ${platform} =~ "orion" ]]; then + sed -i 's|--load("conda")|load("conda")|g' ${workspace}/modulefiles/tasks/${platform,,}/run_vx.local.lua + fi + # Run stat_analysis + stat_analysis -config parm/metplus/STATAnalysisConfig_skill_score -lookin ${workspace}/Indy-Severe-Weather/metprd/point_stat -v 2 -out ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt + + # check skill-score.txt + cat ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt + + # get skill-score (SS_INDEX) and check if it is significantly smaller than 1.0 + # A value greater than 1.0 indicates that the forecast model outperforms the reference, + # while a value less than 1.0 indicates that the reference outperforms the forecast. + tmp_string=$( tail -2 ${SRW_PLATFORM,,}-${srw_compiler}-skill-score.txt | head -1 ) + SS_INDEX=$(echo $tmp_string | awk -F " " '{print $NF}') + echo "Skill Score: ${SS_INDEX}" + if [[ ${SS_INDEX} < "0.700" ]]; then + echo "Your Skill Score is way smaller than 1.00, better check before merging" + exit 1 + else + echo "Congrats! You pass check!" + fi +fi diff --git a/.cicd/scripts/srw_metric_example.sh b/.cicd/scripts/srw_metric_example.sh deleted file mode 100755 index 2018505735..0000000000 --- a/.cicd/scripts/srw_metric_example.sh +++ /dev/null @@ -1,91 +0,0 @@ -#!/usr/bin/env bash -# -# The goal of this script is to provide an example of performing Indy-Severe-Weather test run and compare results to reference with -# Skill score index that is calculated by MET Stat-Analysis Tools -# -# Required: -# WORKSPACE= -# SRW_PLATFORM= -# SRW_COMPILER= -# -# Optional: -[[ -n ${SRW_PROJECT} ]] || SRW_PROJECT="no_account" -[[ -n ${FORGIVE_CONDA} ]] || FORGIVE_CONDA=true -set -e -u -x - -script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)" - -# Get repository root from Jenkins WORKSPACE variable if set, otherwise, set -# relative to script directory. 
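One caveat on the final check: `[[ ${SS_INDEX} < "0.700" ]]` is a lexicographic string comparison inside [[ ]], which happens to behave for scores formatted as 0.NNN but is fragile in general. A numeric alternative (a sketch, not part of this patch):

    # awk compares numerically; a non-zero exit flags a low skill score
    if ! awk -v ss="${SS_INDEX}" 'BEGIN { exit !(ss >= 0.700) }'; then
        echo "Skill score ${SS_INDEX} is below 0.700"
        exit 1
    fi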
diff --git a/.cicd/scripts/srw_metric_example.sh b/.cicd/scripts/srw_metric_example.sh
deleted file mode 100755
index 2018505735..0000000000
--- a/.cicd/scripts/srw_metric_example.sh
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/usr/bin/env bash
-#
-# The goal of this script is to provide an example of performing Indy-Severe-Weather test run and compare results to reference with
-# Skill score index that is calculated by MET Stat-Analysis Tools
-#
-# Required:
-#    WORKSPACE=
-#    SRW_PLATFORM=
-#    SRW_COMPILER=
-#
-# Optional:
-[[ -n ${SRW_PROJECT} ]] || SRW_PROJECT="no_account"
-[[ -n ${FORGIVE_CONDA} ]] || FORGIVE_CONDA=true
-set -e -u -x
-
-script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
-
-# Get repository root from Jenkins WORKSPACE variable if set, otherwise, set
-# relative to script directory.
-declare workspace
-if [[ -n "${WORKSPACE}/${SRW_PLATFORM}" ]]; then
-    workspace="${WORKSPACE}/${SRW_PLATFORM}"
-else
-    workspace="$(cd -- "${script_dir}/../.." && pwd)"
-fi
-
-# Normalize Parallel Works cluster platform value.
-declare platform
-if [[ "${SRW_PLATFORM}" =~ ^(az|g|p)clusternoaa ]]; then
-    platform='noaacloud'
-else
-    platform="${SRW_PLATFORM}"
-fi
-
-# Test directories
-we2e_experiment_base_dir="${workspace}/../expt_dirs/metric_test"
-we2e_test_dir="${workspace}/tests/WE2E"
-we2e_test_name="grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0"
-
-pwd
-
-# Activate the workflow environment ...
-source etc/lmod-setup.sh ${platform,,}
-module use modulefiles
-module load build_${platform,,}_${SRW_COMPILER}
-module load wflow_${platform,,}
-
-[[ ${FORGIVE_CONDA} == true ]] && set +e +u  # Some platforms have incomplete python3 or conda support, but wouldn't necessarily block workflow tests
-conda activate workflow_tools
-set -e -u
-
-# build srw
-cd ${workspace}/tests
-./build.sh ${platform,,} ${SRW_COMPILER}
-cd ${workspace}
-
-# run test
-[[ -d ${we2e_experiment_base_dir} ]] && rm -rf ${we2e_experiment_base_dir}
-cd ${workspace}/tests/WE2E
-./run_WE2E_tests.py -t ${we2e_test_name} -m ${platform,,} -a ${SRW_PROJECT} --expt_basedir "metric_test" --exec_subdir=install_intel/exec -q
-cd ${workspace}
-
-# run skill-score check
-[[ ! -f Indy-Severe-Weather.tgz ]] && wget https://noaa-ufs-srw-pds.s3.amazonaws.com/sample_cases/release-public-v2.1.0/Indy-Severe-Weather.tgz
-[[ ! -d Indy-Severe-Weather ]] && tar xvfz Indy-Severe-Weather.tgz
-[[ -f skill-score.out ]] && rm skill-score.out
-# Skill score index is computed over several terms that are defined in parm/metplus/STATAnalysisConfig_skill_score.
-# It is computed by aggregating the output from earlier runs of the Point-Stat and/or Grid-Stat tools over one or more cases.
-# In this example, skill score index is a weighted average of 4 skill scores of RMSE statistics for wind speed, dew point temperature,
-# temperature, and pressure at lowest level in the atmosphere over 6 hour lead time.
-cp ${we2e_experiment_base_dir}/${we2e_test_name}/2019061500/metprd/PointStat/*.stat ${workspace}/Indy-Severe-Weather/metprd/point_stat/
-# load met and metplus
-module use modulefiles/tasks/${platform,,}
-module load run_vx.local
-stat_analysis -config parm/metplus/STATAnalysisConfig_skill_score -lookin ${workspace}/Indy-Severe-Weather/metprd/point_stat -v 2 -out skill-score.out
-
-# check skill-score.out
-cat skill-score.out
-
-# get skill-score (SS_INDEX) and check if it is significantly smaller than 1.0
-# A value greater than 1.0 indicates that the forecast model outperforms the reference,
-# while a value less than 1.0 indicates that the reference outperforms the forecast.
-tmp_string=$( tail -2 skill-score.out | head -1 )
-SS_INDEX=$(echo $tmp_string | awk -F " " '{print $NF}')
-echo "Skill Score: ${SS_INDEX}"
-if [[ ${SS_INDEX} < "0.700" ]]; then
-    echo "Your Skill Score is way smaller than 1.00, better check before merging"
-    exit 1
-else
-    echo "Congrats! You pass check!"
-fi
diff --git a/.cicd/scripts/srw_test.sh b/.cicd/scripts/srw_test.sh
index 1bffe083bd..90273f2730 100755
--- a/.cicd/scripts/srw_test.sh
+++ b/.cicd/scripts/srw_test.sh
@@ -11,7 +11,7 @@ script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)
 # Get repository root from Jenkins WORKSPACE variable if set, otherwise, set
 # relative to script directory.
 declare workspace
-if [[ -n "${WORKSPACE}/${SRW_PLATFORM}" ]]; then
+if [[ -d "${WORKSPACE}/${SRW_PLATFORM}" ]]; then
     workspace="${WORKSPACE}/${SRW_PLATFORM}"
 else
     workspace="$(cd -- "${script_dir}/../.." && pwd)"
@@ -26,23 +26,30 @@ else
 fi
 
 # Test directories
-we2e_experiment_base_dir="${workspace}/expt_dirs"
-we2e_test_dir="${workspace}/tests/WE2E"
-nco_dir="${workspace}/nco_dirs"
+export we2e_experiment_base_dir="${workspace}/expt_dirs"
+export we2e_test_dir="${workspace}/tests/WE2E"
+
+# Clean any stale test logs
+rm -f ${workspace}/tests/WE2E/log.*
+rm -f ${we2e_experiment_base_dir}/*/log.generate_FV3LAM_wflow ${we2e_experiment_base_dir}/*/log/* WE2E_summary*txt
 
 # Run the end-to-end tests.
 if "${SRW_WE2E_COMPREHENSIVE_TESTS}"; then
-    test_type="comprehensive"
+    export test_type="comprehensive"
 else
-    test_type="coverage"
+    export test_type=${SRW_WE2E_SINGLE_TEST:-"coverage"}
+    if [[ "${test_type}" = skill-score ]]; then
+        export test_type="grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0"
+    fi
 fi
 
 cd ${we2e_test_dir}
 # Progress file
-progress_file="${workspace}/we2e_test_results-${platform}-${SRW_COMPILER}.txt"
-./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \
-    --expt_basedir=${we2e_experiment_base_dir} \
-    --opsroot=${nco_dir} | tee ${progress_file}
+progress_file="${workspace}/we2e_test_results-${SRW_PLATFORM}-${SRW_COMPILER}.txt"
+/usr/bin/time -p -f '{\n "cpu": "%P"\n, "memMax": "%M"\n, "mem": {"text": "%X", "data": "%D", "swaps": "%W", "context": "%c", "waits": "%w"}\n, "pagefaults": {"major": "%F", "minor": "%R"}\n, "filesystem": {"inputs": "%I", "outputs": "%O"}\n, "time": {"real": "%e", "user": "%U", "sys": "%S"}\n}' -o ${WORKSPACE}/${SRW_PLATFORM}-${SRW_COMPILER}-time-srw_test.json \
+    ./setup_WE2E_tests.sh ${platform} ${SRW_PROJECT} ${SRW_COMPILER} ${test_type} \
+    --expt_basedir=${we2e_experiment_base_dir} | tee ${progress_file}; \
+    [[ -f ${we2e_experiment_base_dir}/grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0/log.generate_FV3LAM_wflow ]] && ${workspace}/.cicd/scripts/srw_metric.sh run_stat_anly
 
 # Set exit code to number of failures
 set +e
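With the new SRW_WE2E_SINGLE_TEST hook, the same script covers three test modes. Illustrative invocations (a sketch, assuming the usual Jenkins environment -- WORKSPACE, SRW_PLATFORM, SRW_COMPILER, SRW_PROJECT -- is already set):

    # Default coverage suite
    SRW_WE2E_COMPREHENSIVE_TESTS=false .cicd/scripts/srw_test.sh
    # One named WE2E test
    SRW_WE2E_COMPREHENSIVE_TESTS=false \
        SRW_WE2E_SINGLE_TEST=grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0 \
        .cicd/scripts/srw_test.sh
    # 'skill-score' aliases that same grid test and, once its workflow log exists,
    # chains into srw_metric.sh run_stat_anly
    SRW_WE2E_COMPREHENSIVE_TESTS=false SRW_WE2E_SINGLE_TEST=skill-score .cicd/scripts/srw_test.sh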
diff --git a/.cicd/scripts/wrapper_srw_ftest.sh b/.cicd/scripts/wrapper_srw_ftest.sh
index e4afaf9e98..33fd966efa 100755
--- a/.cicd/scripts/wrapper_srw_ftest.sh
+++ b/.cicd/scripts/wrapper_srw_ftest.sh
@@ -15,17 +15,17 @@ declare arg_1
 if [[ "${SRW_PLATFORM}" == cheyenne ]] || [[ "${SRW_PLATFORM}" == derecho ]]; then
     workflow_cmd=qsub
     arg_1=""
-    check_job="qstat -u ${USER} -r ${job_id}"
 else
     workflow_cmd=sbatch
     arg_1="--parsable"
-    check_job="squeue -u ${USER} -j ${job_id} --noheader"
 fi
 
 # Customize wrapper scripts
-if [[ "${SRW_PLATFORM}" == gaea-c5 ]]; then
+if [[ "${SRW_PLATFORM}" == gaea ]]; then
     sed -i '15i #SBATCH --clusters=c5' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh
     sed -i 's|qos=batch|qos=normal|g' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh
+    sed -i 's|00:30:00|00:45:00|g' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh
+    sed -i 's|${JOBSdir}/JREGIONAL_RUN_POST|$USHdir/load_modules_run_task.sh "gaea" "run_post" ${JOBSdir}/JREGIONAL_RUN_POST|g' ${WORKSPACE}/${SRW_PLATFORM}/ush/wrappers/run_post.sh
 fi
 
 if [[ "${SRW_PLATFORM}" == hera ]]; then
@@ -34,6 +34,14 @@ if [[ "${SRW_PLATFORM}" == hera ]]; then
     fi
 fi
 
+if [[ "${SRW_PLATFORM}" == jet ]]; then
+    sed -i '15i #SBATCH --partition=xjet' ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh
+fi
+
+if [[ "${TASK_DEPTH}" == 0 ]] ; then
+    exit 0
+fi
+
 # Call job card and return job_id
 echo "Running: ${workflow_cmd} -A ${SRW_PROJECT} ${arg_1} ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh"
 job_id=$(${workflow_cmd} -A ${SRW_PROJECT} ${arg_1} ${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/${workflow_cmd}_srw_ftest.sh)
@@ -44,6 +52,11 @@ sleep 10
 # Check for job and exit when done
 while true
 do
+    if [[ "${SRW_PLATFORM}" == derecho ]]; then
+        check_job="qstat -u ${USER} -r ${job_id}"
+    else
+        check_job="squeue -u ${USER} -j ${job_id} --noheader"
+    fi
     job_id_info=$($check_job)
     if [ ! -z "$job_id_info" ]; then
         echo "Job is still running. Check again in two minutes"
@@ -54,7 +67,7 @@ do
 
     # Return exit code and check for results file first
     results_file="${WORKSPACE}/${SRW_PLATFORM}/functional_test_results_${SRW_PLATFORM}_${SRW_COMPILER}.txt"
     if [ ! -f "$results_file" ]; then
-        echo "Missing results file! \nexit 1"
+        echo -e "Missing results file! \nexit 1"
         exit 1
     fi
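For reference, the wrapper's submit-and-poll flow reduces to roughly the following (Slurm path shown; cheyenne/derecho substitute qsub and qstat -u ${USER} -r; the two-minute sleep is an assumption based on the script's message):

    job_id=$(sbatch -A ${SRW_PROJECT} --parsable .cicd/scripts/sbatch_srw_ftest.sh)
    while [ -n "$(squeue -u ${USER} -j ${job_id} --noheader)" ]; do
        echo "Job is still running. Check again in two minutes"
        sleep 120
    done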
diff --git a/.github/PULL_REQUEST_TEMPLATE b/.github/PULL_REQUEST_TEMPLATE
index 1c363c651f..29a878d4a4 100644
--- a/.github/PULL_REQUEST_TEMPLATE
+++ b/.github/PULL_REQUEST_TEMPLATE
@@ -30,15 +30,13 @@
 
-- [ ] hera.intel
-- [ ] orion.intel
-- [ ] hercules.intel
-- [ ] cheyenne.intel
-- [ ] cheyenne.gnu
 - [ ] derecho.intel
 - [ ] gaea.intel
-- [ ] gaeac5.intel
+- [ ] hera.gnu
+- [ ] hera.intel
+- [ ] hercules.intel
 - [ ] jet.intel
+- [ ] orion.intel
 - [ ] wcoss2.intel
 - [ ] NOAA Cloud (indicate which platform)
 - [ ] Jenkins
diff --git a/.github/workflows/python_tests.yaml b/.github/workflows/python_tests.yaml
index 0e71f8d72d..fb0de16910 100644
--- a/.github/workflows/python_tests.yaml
+++ b/.github/workflows/python_tests.yaml
@@ -41,6 +41,8 @@ jobs:
           pylint --ignore-imports=yes tests/test_python/
           pylint ush/create_*.py
           pylint ush/generate_FV3LAM_wflow.py
+          pylint ush/set_fv3nml*.py
+          pylint ush/update_input_nml.py
 
       - name: Run python unittests
         run: |
diff --git a/.gitignore b/.gitignore
index 8566703a22..ed78ca4182 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,9 +1,14 @@
 bin/
+conda/
 exec/
 build/
 fix/
 include/
 lib/
+parm/aqm_utils_parm/
+parm/nexus_config/
+parm/ufs_utils_parm/
+parm/upp_parm/
 share/
 sorc/*/
 tests/WE2E/WE2E_tests_*.yaml
@@ -11,6 +16,8 @@ tests/WE2E/*.txt
 tests/WE2E/*.log
 tests/WE2E/log.*
 ush/__pycache__/
+ush/aqm_utils_python/
+ush/nexus_utils/
 ush/config.yaml
 ush/python_utils/__pycache__/
 ush/python_utils/workflow-tools/
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index b6afe96c93..c8ce6064b2 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -17,7 +17,7 @@ build:
 
 # Build documentation in the docs/ directory with Sphinx
 sphinx:
-  configuration: doc/UsersGuide/source/conf.py
+  configuration: doc/conf.py
 
 # If using Sphinx, optionally build your docs in additional formats such as PDF
 # formats:
@@ -26,7 +26,7 @@ sphinx:
 # Optionally declare the Python requirements required to build your docs
 python:
   install:
-    - requirements: doc/UsersGuide/requirements.txt
+    - requirements: doc/requirements.txt
 
 submodules:
   include:
diff --git a/Externals.cfg b/Externals.cfg
index 4bae74b316..4545cd8ca5 100644
--- a/Externals.cfg
+++ b/Externals.cfg
@@ -3,7 +3,7 @@
 protocol = git
 repo_url = https://github.com/ufs-community/UFS_UTILS
 # Specify either a branch name or a hash but not both.
 #branch = develop
-hash = dc0e4a6
+hash = 57bd832
 local_path = sorc/UFS_UTILS
 required = True
@@ -12,7 +12,7 @@
 protocol = git
 repo_url = https://github.com/ufs-community/ufs-weather-model
 # Specify either a branch name or a hash but not both.
 #branch = develop
-hash = 020e783
+hash = b5a1976
 local_path = sorc/ufs-weather-model
 required = True
@@ -21,7 +21,7 @@
 protocol = git
 repo_url = https://github.com/NOAA-EMC/UPP
 # Specify either a branch name or a hash but not both.
 #branch = develop
-hash = fae617b
+hash = be0410e
 local_path = sorc/UPP
 required = True
@@ -30,7 +30,7 @@
 protocol = git
 repo_url = https://github.com/noaa-oar-arl/NEXUS
 # Specify either a branch name or a hash but not both.
 #branch = develop
-hash = 6a7a994
+hash = e153072
 local_path = sorc/arl_nexus
 required = True
@@ -39,7 +39,7 @@
 protocol = git
 repo_url = https://github.com/NOAA-EMC/AQM-utils
 # Specify either a branch name or a hash but not both.
 #branch = develop
-hash = 694a139
+hash = e236acd
 local_path = sorc/AQM-utils
 required = True
diff --git a/README.md b/README.md
index 3bf56f4c21..bdda52279d 100644
--- a/README.md
+++ b/README.md
@@ -1,13 +1,13 @@
 # UFS Short-Range Weather Application
 
-The Unified Forecast System (UFS) is a community-based, coupled, comprehensive Earth modeling system. NOAA's operational model suite for numerical weather prediction (NWP) is quickly transitioning to the UFS from a number of legacy modeling systems. The UFS enables research, development, and contribution opportunities within the broader Weather Enterprise (including government, industry, and academia). For more information about the UFS, visit the UFS Portal at https://ufscommunity.org/.
+The Unified Forecast System (UFS) is a community-based, coupled, comprehensive Earth modeling system. NOAA's operational model suite for numerical weather prediction (NWP) is quickly transitioning to the UFS from a number of legacy modeling systems. The UFS enables research, development, and contribution opportunities within the broader Weather Enterprise (including government, industry, and academia). For more information about the UFS, visit the UFS Portal at https://ufs.epic.noaa.gov/.
 
-The UFS includes multiple applications (see a complete list at https://ufscommunity.org/science/aboutapps/) that support different forecast durations and spatial domains. This documentation describes the development branch of the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. The development branch of the application is continually evolving as the system undergoes open development. The latest SRW App release (v2.2.0) represents a snapshot of this continuously evolving system.
+The UFS includes multiple applications (see a complete list at https://ufs.epic.noaa.gov/applications/) that support different forecast durations and spatial domains. This documentation describes the development branch of the UFS Short-Range Weather (SRW) Application, which targets predictions of atmospheric behavior on a limited spatial domain and on time scales from minutes to several days. The development branch of the application is continually evolving as the system undergoes open development. The latest SRW App release (v2.2.0) represents a snapshot of this continuously evolving system.
 
 The UFS SRW App User's Guide associated with the development branch is at: https://ufs-srweather-app.readthedocs.io/en/develop/, while the guide specific to the SRW App v2.2.0 release can be found at: https://ufs-srweather-app.readthedocs.io/en/release-public-v2.2.0/. The repository is at: https://github.com/ufs-community/ufs-srweather-app.
 
 For instructions on how to clone the repository, build the code, and run the workflow, see:
-- https://ufs-srweather-app.readthedocs.io/en/develop/BuildingRunningTesting/Quickstart.html
+- https://ufs-srweather-app.readthedocs.io/en/develop/UsersGuide/BuildingRunningTesting/Quickstart.html
 
 For a debugging guide for users and developers in the field of Earth System Modeling, please see:
 https://epic.noaa.gov/wp-content/uploads/2022/12/Debugging-Guide.pdf
diff --git a/aqm_environment.yml b/aqm_environment.yml
index 03d72f6706..11bf9e57e3 100644
--- a/aqm_environment.yml
+++ b/aqm_environment.yml
@@ -9,5 +9,5 @@ dependencies:
   - pylint=2.17*
   - pytest=7.2*
   - scipy=1.10.*
-  - uwtools=1.0.0
+  - uwtools=2.3*
   - xarray=2022.11.*
diff --git a/devbuild.sh b/devbuild.sh
index 9136b86e7a..014fbdb3b7 100755
--- a/devbuild.sh
+++ b/devbuild.sh
@@ -212,11 +212,6 @@ printf "PLATFORM(MACHINE)=${PLATFORM}\n" >&2
 if [ "${PLATFORM}" = "wcoss2" ]; then
   BUILD_CONDA="off"
 fi
-# Conda is not used on Gaea-c5 F2 filesystem
-# it needs to be reevaluated when moved to F2 filesystem
-if [ "${PLATFORM}" = "gaea-c5" ]; then
-  BUILD_CONDA="off"
-fi
 
 # build conda and conda environments, if requested.
 if [ "${BUILD_CONDA}" = "on" ] ; then
@@ -288,7 +283,7 @@ set -eu
 # automatically determine compiler
 if [ -z "${COMPILER}" ] ; then
   case ${PLATFORM} in
-    jet|hera|gaea-c5) COMPILER=intel ;;
+    jet|hera|gaea) COMPILER=intel ;;
     orion) COMPILER=intel ;;
     wcoss2) COMPILER=intel ;;
     cheyenne) COMPILER=intel ;;
@@ -507,4 +502,38 @@ else
   fi
 fi
 
+# Copy config/python directories from component to main directory (EE2 compliance)
+if [ "${BUILD_UFS_UTILS}" = "on" ]; then
+  if [ -d "${SRW_DIR}/parm/ufs_utils_parm" ]; then
+    rm -rf ${SRW_DIR}/parm/ufs_utils_parm
+  fi
+  cp -rp ${SRW_DIR}/sorc/UFS_UTILS/parm ${SRW_DIR}/parm/ufs_utils_parm
+fi
+if [ "${BUILD_UPP}" = "on" ]; then
+  if [ -d "${SRW_DIR}/parm/upp_parm" ]; then
+    rm -rf ${SRW_DIR}/parm/upp_parm
+  fi
+  cp -rp ${SRW_DIR}/sorc/UPP/parm ${SRW_DIR}/parm/upp_parm
+fi
+if [ "${BUILD_NEXUS}" = "on" ]; then
+  if [ -d "${SRW_DIR}/parm/nexus_config" ]; then
+    rm -rf ${SRW_DIR}/parm/nexus_config
+  fi
+  cp -rp ${SRW_DIR}/sorc/arl_nexus/config ${SRW_DIR}/parm/nexus_config
+  if [ -d "${SRW_DIR}/ush/nexus_utils" ]; then
+    rm -rf ${SRW_DIR}/ush/nexus_utils
+  fi
+  cp -rp ${SRW_DIR}/sorc/arl_nexus/utils ${SRW_DIR}/ush/nexus_utils
+fi
+if [ "${BUILD_AQM_UTILS}" = "on" ]; then
+  if [ -d "${SRW_DIR}/parm/aqm_utils_parm" ]; then
+    rm -rf ${SRW_DIR}/parm/aqm_utils_parm
+  fi
+  cp -rp ${SRW_DIR}/sorc/AQM-utils/parm ${SRW_DIR}/parm/aqm_utils_parm
+  if [ -d "${SRW_DIR}/ush/aqm_utils_python" ]; then
+    rm -rf ${SRW_DIR}/ush/aqm_utils_python
+  fi
+  cp -rp ${SRW_DIR}/sorc/AQM-utils/python_utils ${SRW_DIR}/ush/aqm_utils_python
+fi
+
 exit 0
diff --git a/devclean.sh b/devclean.sh
index 01ace7a7d9..b26988dd93 100755
--- a/devclean.sh
+++ b/devclean.sh
@@ -4,33 +4,31 @@ usage () {
 cat << EOF_USAGE
-Clean the UFS-SRW Application build
+Clean the UFS-SRW Application build.
+
+NOTE: If the user included custom directories at build time, those directories must be deleted manually.
+
 Usage: $0 [OPTIONS] ...
 
 OPTIONS
   -h, --help
-    show this help guide
+    Show this help guide
   -a, --all
-    removes "bin", "build" directories, and other build artifacts
-  --remove
-    removes the "build" directory, keeps the "bin", "lib" and other build artifacts intact
-  --clean
-    removes "bin", "build" directories, and other build artifacts (same as "-a", "--all")
-  --conda
-    removes "conda" directory and conda_loc file in SRW
-  --install-dir=INSTALL_DIR
-    installation directory name (\${SRW_DIR} by default)
-  --build-dir=BUILD_DIR
-    main build directory, absolute path (\${SRW_DIR}/build/ by default)
-  --bin-dir=BIN_DIR
-    binary directory name ("exec" by default); full path is \${INSTALL_DIR}/\${BIN_DIR})
-  --conda-dir=CONDA_DIR
-    directory where conda is installed. caution: if outside the SRW clone, it may have broader use
-  --sub-modules
-    remove sub-module directories. They will need to be checked out again by sourcing "\${SRW_DIR}/manage_externals/checkout_externals" before attempting subsequent builds
+    Remove all build artifacts, conda and submodules (equivalent to \`-b -c -s\`)
+  -b, --build
+    Remove build directories and artifacts: build/ exec/ share/ include/ lib/ lib64/
+  -c, --conda
+    Remove "conda" directory and conda_loc file in SRW main directory
+  --container
+    For cleaning builds within the SRW containers, will remove the "container-bin"
+    directory rather than "exec". Has no effect if \`-b\` is not specified.
+  -f, --force
+    Remove directories as requested, without asking for user confirmation of their deletion.
+  -s, --sub-modules
+    Remove sub-module directories. They need to be checked out again by sourcing "\${SRW_DIR}/manage_externals/checkout_externals" before attempting subsequent builds
   -v, --verbose
-    provide more verbose output
-
+    Provide more verbose output
+
 EOF_USAGE
 }
@@ -39,17 +37,10 @@ settings () {
 cat << EOF_SETTINGS
 Settings:
 
-  INSTALL_DIR=${INSTALL_DIR}
-  BUILD_DIR=${BUILD_DIR}
-  BIN_DIR=${BIN_DIR}
-  CONDA_DIR=${CONDA_DIR}
-  REMOVE=${REMOVE}
+  FORCE=${REMOVE}
   VERBOSE=${VERBOSE}
-
-Default cleaning options: (if no arguments provided, then nothing is cleaned)
-  REMOVE=${REMOVE}
-  CLEAN=${CLEAN}
-  INCLUDE_SUB_MODULES=${INCLUDE_SUB_MODULES}
+  REMOVE_SUB_MODULES=${REMOVE_SUB_MODULES}
+  REMOVE_CONDA=${REMOVE_CONDA}
 
 EOF_SETTINGS
 }
@@ -63,46 +54,28 @@ usage_error () {
 
 # default settings
 SRW_DIR=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}" )" )" && pwd -P)
-INSTALL_DIR=${INSTALL_DIR:-${SRW_DIR}}
-BUILD_DIR=${BUILD_DIR:-"${SRW_DIR}/build"}
-BIN_DIR="exec"
-CONDA_DIR=${CONDA_DIR:-"${SRW_DIR}/conda"}
-REMOVE=false
 VERBOSE=false
 
 # default clean options
 REMOVE=false
-CLEAN=false
-INCLUDE_SUB_MODULES=false #changes to true if '--sub-modules' option is provided
+REMOVE_BUILD=false
+REMOVE_CONDA=false
+REMOVE_SUB_MODULES=false
+CONTAINER=false
 
-# process requires arguments
-if [[ ("$1" == "--help") || ("$1" == "-h") ]]; then
-  usage
-  exit 0
-fi
-
-# process optional arguments
+# process arguments
 while :; do
   case $1 in
     --help|-h) usage; exit 0 ;;
-    --all|-a) ALL_CLEAN=true ;;
-    --remove) REMOVE=true ;;
-    --remove=?*|--remove=) usage_error "$1 argument ignored." ;;
-    --clean) CLEAN=true ;;
-    --conda) REMOVE_CONDA=true ;;
-    --install-dir=?*) INSTALL_DIR=${1#*=} ;;
-    --install-dir|--install-dir=) usage_error "$1 requires argument." ;;
-    --build-dir=?*) BUILD_DIR=${1#*=} ;;
-    --build-dir|--build-dir=) usage_error "$1 requires argument." ;;
-    --bin-dir=?*) BIN_DIR=${1#*=} ;;
-    --bin-dir|--bin-dir=) usage_error "$1 requires argument." ;;
-    --conda-dir=?*) CONDA_DIR=${1#*=} ;;
-    --conda-dir|--conda-dir=) usage_error "$1 requires argument." ;;
-    --sub-modules) INCLUDE_SUB_MODULES=true ;;
+    --all|-a) REMOVE_BUILD=true; REMOVE_CONDA=true; REMOVE_SUB_MODULES=true ;;
+    --build|-b) REMOVE_BUILD=true ;;
+    --conda|-c) REMOVE_CONDA=true ;;
+    --container) CONTAINER=true ;;
+    --force|-f) REMOVE=true ;;
+    --force=?*|--force=) usage_error "$1 argument ignored." ;;
+    --sub-modules|-s) REMOVE_SUB_MODULES=true ;;
+    --sub-modules=?*|--sub-modules=) usage_error "$1 argument ignored." ;;
     --verbose|-v) VERBOSE=true ;;
-    --verbose=?*|--verbose=) usage_error "$1 argument ignored." ;;
-    # targets
-    default) ALL_CLEAN=false ;;
     # unknown
     -?*|?*) usage_error "Unknown option $1" ;;
     *) break ;;
@@ -110,66 +83,94 @@ while :; do
   shift
 done
 
-# choose defaults to clean
-if [ "${ALL_CLEAN}" = true ]; then
-  CLEAN=true
-fi
 
 # print settings
 if [ "${VERBOSE}" = true ] ; then
  settings
 fi
 
-# clean if build directory already exists
-if [ "${REMOVE}" = true ] && [ "${CLEAN}" = false ] ; then
-  printf '%s\n' "Remove the \"build\" directory only, BUILD_DIR = $BUILD_DIR "
-  [[ -d ${BUILD_DIR} ]] && rm -rf ${BUILD_DIR} && printf '%s\n' "rm -rf ${BUILD_DIR}"
-elif [ "${CLEAN}" = true ]; then
-  printf '%s\n' "Remove build directory, bin directory, and other build artifacts "
-  printf '%s\n' " from the installation directory = ${INSTALL_DIR} "
-
-  directories=( \
-    "${BUILD_DIR}" \
-    "${INSTALL_DIR}/${BIN_DIR}" \
-    "${INSTALL_DIR}/share" \
-    "${INSTALL_DIR}/include" \
-    "${INSTALL_DIR}/lib" \
-    "${INSTALL_DIR}/lib64" \
+# Populate "removal_list" as an array of files/directories to remove, based on user selections
+declare -a removal_list='()'
+
+# Clean standard build artifacts
+if [ ${REMOVE_BUILD} == true ]; then
+  removal_list=( \
+    "${SRW_DIR}/build" \
+    "${SRW_DIR}/share" \
+    "${SRW_DIR}/include" \
+    "${SRW_DIR}/lib" \
+    "${SRW_DIR}/lib64" \
   )
-  if [ ${#directories[@]} -ge 1 ]; then
-    for dir in ${directories[@]}; do
-      [[ -d "${dir}" ]] && rm -rfv ${dir}
-    done
-    echo " "
+  if [ ${CONTAINER} == true ]; then
+    removal_list+=("${SRW_DIR}/container-bin")
+  else
+    removal_list+=("${SRW_DIR}/exec")
  fi
 fi
 
-# Clean all the submodules if requested. Note: Need to check out them again before attempting subsequent builds, by sourcing ${SRW_DIR}/manage_externals/checkout_externals
-if [ ${INCLUDE_SUB_MODULES} == true ]; then
-  printf '%s\n' "Removing submodules ..."
+
+# Clean all the submodules if requested.
+if [ ${REMOVE_SUB_MODULES} == true ]; then
  declare -a submodules='()'
-  submodules=(${SRW_DIR}/sorc/*)
-#  echo " submodules are: ${submodules[@]} (total of ${#submodules[@]}) "
-  if [ ${#submodules[@]} -ge 1 ]; then
-    for sub in ${submodules[@]}; do [[ -d "${sub}" ]] && ( rm -rf ${sub} && printf '%s\n' "rm -rf ${sub}" ); done
+  submodules=(./sorc/*)
+  # Only add directories to make sure we don't delete CMakeLists.txt
+  for sub in ${submodules[@]}; do [[ -d "${sub}" ]] && removal_list+=( "${sub}" ); done
+  if [ "${VERBOSE}" = true ] ; then
+    printf '%s\n' "Note: Need to check out submodules again for any subsequent builds, " \
+      " by running ${SRW_DIR}/manage_externals/checkout_externals "
  fi
-  printf '%s\n' "Note: Need to check out submodules again for any subsequent builds, " \
-    " by sourcing ${SRW_DIR}/manage_externals/checkout_externals "
 fi
-#
+
 # Clean conda if requested
 if [ "${REMOVE_CONDA}" = true ] ; then
-  printf '%s\n' "Removing conda location file"
-  rm -rf ${SRW_DIR}/conda_loc
-  printf '%s\n' "Removing conda installation"
-  rm -rf ${CONDA_DIR}
+  # Read the "conda_loc" file to determine the location of the conda install; if the user has changed it to a different location,
+  # they likely do not want to remove it!
+  conda_location=$(<${SRW_DIR}/conda_loc)
+  if [ "${VERBOSE}" = true ] ; then
+    echo "conda_location=$conda_location"
+  fi
+  if [ "${conda_location}" == "${SRW_DIR}/conda" ]; then
+    removal_list+=("${SRW_DIR}/conda_loc")
+    removal_list+=("${SRW_DIR}/conda")
+  else
+    echo "WARNING: location of conda build in ${SRW_DIR}/conda_loc is not the default location!"
+    echo "Will not attempt to remove conda!"
+  fi
 fi
 
+# If array is empty, that means user has not selected any removal options
+if [ ${#removal_list[@]} -eq 0 ]; then
+  usage_error "No removal options specified"
+fi
+
+while [ ${REMOVE} == false ]; do
+  # Make user confirm deletion of directories unless '--force' option was provided
+  printf "The following files/directories will be deleted:\n\n"
+  for i in "${removal_list[@]}"; do
+    echo "$i"
+  done
+  echo ""
+  read -p "Confirm that you want to delete these files/directories! (Yes/No): " choice
+  case ${choice} in
+    [Yy]* ) REMOVE=true ;;
+    [Nn]* ) echo "User chose not to delete, exiting..."; exit ;;
+    * ) printf "Invalid option selected.\n" ;;
+  esac
+done
+
+if [ ${REMOVE} == true ]; then
+  for dir in ${removal_list[@]}; do
+    echo "Removing ${dir}"
+    if [ "${VERBOSE}" = true ] ; then
+      rm -rfv ${dir}
+    else
+      rm -rf ${dir}
+    fi
+  done
+  echo " "
+  echo "All the requested cleaning tasks have been completed"
+  echo " "
+fi
 
-echo " "
-echo "All the requested cleaning tasks have been completed"
-echo " "
 exit 0
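With the flag-based interface, typical cleanups look like the following (illustrative invocations):

    ./devclean.sh -b              # drop build/, exec/, lib*/ etc., with a confirmation prompt
    ./devclean.sh -b --container  # same, but removes container-bin instead of exec
    ./devclean.sh -a --force      # build + conda + submodules, no confirmation prompt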
+* Modifications should not break any existing supported capabilities on any supported platforms.
+* Update the RST documentation files where appropriate as part of the PR. If necessary, contributors may update the documentation in a subsequent PR. In these cases, the contributor should :srw-repo:`open an issue ` reflecting the need for documentation and include the issue number and explanation in the Documentation section of their initial PR.
+* Binary files will no longer be merged into the ``develop`` branch. A binary file is defined as a "non-text" file and can include ``*.png``, ``*.gif``, ``*.jp*g``, ``*.tiff``, ``*.tar``, ``*.tgz``, ``*.gz``, ``*.mod``, ``*.o``, and executables. If a binary file needs to be staged in the ``ufs-srweather-app`` repository, please add it to the wiki repository. The command to clone the ``ufs-srweather-app``'s wiki repository is ``git clone https://github.com/ufs-community/ufs-srweather-app.wiki.git``. Users with write access to the wiki repository can add the files there and link them to the documentation as needed. Users who do not have write access to the wiki repository should reach out to @MichaelLueken and/or note this in their pull request so that the files can be added.
+
+SRW Application Guidelines
+============================
+
+
+**General Coding Standards:**
+
+* The ``ufs-srweather-app`` repository must not contain source code for compiled programs. Only scripts and configuration files should reside in this repository.
+* All bash scripts must explicitly be ``#!/bin/bash`` scripts. They should *not* be login-enabled (i.e., scripts should *not* use the ``-l`` flag).
+* MacOS does not have all Linux utilities by default. Developers should ensure that they do not break any MacOS capabilities with their contribution.
+* All code must be indented appropriately and conform to the style of existing scripts (e.g., local variables should be lowercase, global variables should be uppercase).
+
+**External Components**
+
+* All externals live in a single ``Externals.cfg`` file.
+* Only a single hash will be maintained for any given external code base. All externals should point to this static hash (not to the top of a branch).
+* All new entries in ``Externals.cfg`` must point only to authoritative repositories. In other words, entries must point to either a `ufs-community GitHub organization `__ repository or another NOAA project organization repository.
+
+  * Temporary exceptions are made for a PR into the ``develop`` branch of ``ufs-srweather-app`` that is dependent on another PR. When the component PR is merged, the contributor must update the corresponding ``ufs-srweather-app`` PR with the hash of the component's authoritative repository.
+
+**Build System**
+
+* Each component must build with CMake.
+* Each component must build with Intel compilers on official :srw-wiki:`Level 1 ` platforms and with GNU or Intel compilers on other platforms.
+* Each component must have a mechanism for platform independence (i.e., no hard-coded machine-specific settings outside of established environment, configuration, and modulefiles).
+* Each component must build with the standard supported NCEPLIBS environment (currently `spack-stack `__).
+
+**Modulefiles**
+
+* All official platforms should have a modulefile that can be sourced to provide the appropriate Python packages and other settings for the platform.
+* Each SRW component must build using the common modules located in the ``modulefiles/srw_common`` file.
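+
+For example, the build environment is typically activated by loading the platform- and compiler-specific modulefile before compiling. A minimal sketch (the ``build_hera_intel`` name is illustrative; actual names follow the ``build_<platform>_<compiler>`` pattern):
+
+.. code-block:: console
+
+   # Make the SRW modulefiles visible to the module system, then load the build environment
+   module use /path/to/ufs-srweather-app/modulefiles
+   module load build_hera_intel
+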
+ + +Workflow Coding Standards +-------------------------- + +**Python Coding Standards:** + + * All new Python workflow contributions should come with an appropriate environment YAML file (similar to ``environment.yaml``, ``graphics_environment.yaml``, and ``aqm_environment.yaml``). + * Keep the use of external Python packages to a minimum for necessary workflow tasks. Currently, these include ``f90nml``, ``pyyaml``, and ``Jinja2``. + +**Workflow Design:** Follow the :nco:`NCO Guidelines <>` for what is incorporated in each layer of the workflow. This is particularly important in the ``scripts`` directory. + +**Management of the Configuration File:** New configurable options must be consistent with existing configurable options and be documented in :srw-repo:`UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst `. Add necessary checks on acceptable options where applicable. Add appropriate default values in ``config_defaults.yaml``. + +**Management of Template Files:** If a new configurable option is required in an existing template, it must be handled similarly to its counterparts in the scripts that fill in the template. For example, if a new type of namelist is introduced for a new application component, it should make use of the existing ``jinja`` framework for populating namelist settings. + +**Namelist Management:** Namelists in ``ufs-srweather-app`` are generated using a Python tool and managed by setting YAML configuration parameters. This allows for the management of multiple configuration settings with maximum flexibility and minimum duplication of information. \ No newline at end of file diff --git a/doc/ContribGuide/contributing.rst b/doc/ContribGuide/contributing.rst new file mode 100644 index 0000000000..eb995efb41 --- /dev/null +++ b/doc/ContribGuide/contributing.rst @@ -0,0 +1,338 @@ +============================ +Contributing to the SRW App +============================ + +Fork and PR Overview +===================== + +.. note:: + + Thank you to the Unified Workflow (UW) team for allowing us to adapt their Fork and PR Model overview for use in the SRW App. The original can be viewed in the `uwtools` :uw:`documentation `. + +Contributions to the ``ufs-srweather-app`` project are made via a :github-docs:`Fork` and :github-docs:`Pull Request (PR)` model. GitHub provides a thorough description of this contribution model in their `Contributing to a project` :github-docs:`Quickstart`, but the steps, with respect to ``ufs-srweather-app`` contributions, can be summarized as: + +#. :github-docs:`Create an issue ` to document proposed changes. +#. :github-docs:`Fork` the :srw-repo:`ufs-srweather-app repository<>` into your personal GitHub account. +#. :github-docs:`Clone` your fork onto your development system. +#. :github-docs:`Create a branch` in your clone for your changes. All development should take place on a branch, *not* on ``develop``. +#. :github-docs:`Make, commit, and push changes` in your clone / to your fork. +#. When your work is complete, :github-docs:`create a pull request (PR)` to merge your changes. + +For future contributions, you may delete and then recreate your fork or configure the official ``ufs-srweather-app`` repository as a :github-docs:`remote repository` on your clone and :github-docs:`sync upstream changes` to stay up-to-date with the official repository. + + +Development and Testing Process +================================= + +#. **Create issue:** Open an :srw-repo:`issue ` in the ``ufs-srweather-app`` to document proposed changes. 
See :ref:`Opening an Issue ` for detailed instructions. +#. **Fork & Clone the SRW App:** :github-docs:`Fork` the :srw-repo:`ufs-srweather-app repository<>` into your personal GitHub account and :github-docs:`clone` your fork onto your development system if you have not already done so. +#. **Create a branch:** in your clone for your changes. All development should take place on a branch, not on ``develop``. Branches should be named as follows, where ``[name]`` is a one-word description of the branch: + + * ``bugfix/[name]``: Fixes a demonstrably incorrect portion of code + * ``feature/[name]``: Adds a new feature to the code or improves an existing portion of the code + * ``text/[name]``: Changes elements of the repository that do not impact the compiled code in any way (e.g., changes to README, documentation, comments, changing quoted Registry elements, white space alignment). + +#. **Development:** Perform and test changes in the feature branch (not on ``develop``!). Document work in the issue and mention the issue number in commit messages to link your work to the issue (e.g., ``commit -m "Issue #23 - "``). Document changes to the workflow and capabilities in the RST files so that the SRW App documentation stays up-to-date. +#. **Testing:** Test code modifications on as many platforms as possible, and request help with further testing from the code management team when unable to test on all Level 1 platforms. The bare minimum testing required before opening a PR is to run the fundamental (:srw-repo:`tests/WE2E/machine_suites/fundamental `) tests on at least one supported machine (additional testing from the comprehensive suite might be required, depending on the nature of the change). To run the fundamental tests manually, please use the following command in the ``tests/WE2E`` directory: + + .. code-block:: console + + ./run_WE2E_tests.py -t=fundamental -m=your_machine -a=your_account + + where ``your_machine`` is the Tier-1 machine you are running the tests on, and ``your_account`` is the account you charge your computational resources to. See section :numref:`Section %s ` for more detail on SRW App testing. + +#. **Pull Request:** When your work is complete, :github-docs:`create a pull request` to merge your changes. When a PR is initiated, the :ref:`PR template ` autofills. Developers should use the template to provide information about the PR in the proper fields. See the guidelines in the :ref:`Making a Pull Request ` section for more details on making a good pull request. +#. **Merge** - When review and testing are complete, a code manager will merge the PR into ``develop``. PRs that are not ready for merging should have a "Work in Progress" label on them. Users who lack the permissions required to add the label can request in their PR that a code manager do so. +#. **Cleanup** - After the PR is merged, the code developer should delete the branch on their fork and close the issue. Feature branches are intended to be short-lived, concentrated on code with one sole purpose, and applicable to a single PR. A new feature branch should be created when subsequent code development continues. + +.. note:: + + Communication with code managers and the :ref:`repository code management team ` throughout the process is encouraged. + +.. _open-issue: + +Opening an Issue +================= + +All changes to ``ufs-srweather-app`` should be associated with a :srw-repo:`GitHub Issue `. Developers should search the existing issues in the ``ufs-srweather-app`` repository before beginning their work. 
If an issue does not exist for the work they are doing, they should create one prior to opening a new pull request. If an issue does exist, developers should be sure to collaborate to avoid duplicative work. + +To open an issue, click on :srw-repo:`"New Issue"` within the ``ufs-srweather-app`` GitHub repository. + +Choose from four options: + +#. :srw-repo:`Bug Report `: Report specific problems ("bugs") in the code using the following template: + + .. code-block:: console + + + + Your bug may already be reported! + Please search on the [Issue tracker](https://github.com/ufs-community/ufs-srweather-app/issues) before creating a new issue. + If an issue already exists, please use that issue to add any additional information. + + ## Expected behavior + + + ## Current behavior + + + ## Machines affected + + + + ## Steps To Reproduce + + + ## Detailed Description of Fix (optional) + + + ## Additional Information (optional) + + + ## Possible Implementation (optional) + + + ## Output (optional) + + +#. :srw-repo:`Feature Request `: New features and feature enhancements fall under this category. Propose features and enhancements using the following template. Optional sections may be deleted. + + .. code-block:: console + + + + Your issue may already be reported! + Please search on the [Issue tracker](https://github.com/ufs-community/ufs-srweather-app/issues) before creating a new issue. If an issue already exists, please use that issue to add any additional information. + + ## Description + + + + + ## Solution + + + ## Requirements** + + + ## Acceptance Criteria (Definition of Done) + + + ## Dependencies (optional) + + + + ## Alternative Solutions (optional) + + + +#. :srw-repo:`Text-Only Changes `: Propose text-only changes using the "Text-only request" template. Optional sections may be deleted. + + .. code-block:: console + + ## Description + + + ## Solution + + + ## Alternatives (optional) + + + ## Related to (optional) + + +#. :srw-repo:`Other `: Open a blank issue, and use the "Feature Request" template above as a starting point to describe the issue. + +For all issue reports, indicate whether this is: + #. A problem that you plan to work on and submit a PR for + #. A problem that you will **not** work on but that requires attention + #. A suggested improvement + +Additionally, please add a priority label to the issue (low, medium, or high priority). If you are unable to add labels to your issues, please request that a code manager add a priority label for you. + + * **High priority:** Issues related to a bug fix, a failing test configuration, or an update required for a release (either an operational implementation or public release). + * **Medium priority:** New features that are not required immediately for either an implementation or release + * **Low priority:** Refactoring work or other work that does not seem to be medium or high priority. + +If you are unable to work on the issue and require assistance through :term:`EPIC`, please make sure to include the ``EPIC Support Requested`` label. If the ``EPIC Support Requested`` label is added to a ``high priority`` issue, it might take some time before EPIC will work on the issue, since EPIC management needs to account for and prioritize these issues. However, after seeing that EPIC is required for high priority issues, management will adapt and allocate the necessary resources to assist. After filling out the issue report, click on "Submit new issue." + + +.. 
_make-pr: + +Making a Pull Request +====================== + +All changes to the SRW App ``develop`` branch should be handled via GitHub’s "Pull Request" (PR) functionality from a branch in the developer's fork. When creating your PR, please follow these guidelines, specific to the ``ufs-srweather-app`` project: + +* Ensure that your PR is targeting the base repository ``ufs-community/ufs-srweather-app`` and an appropriate base branch (usually ``develop``). +* **Complete PR template.** Your PR will appear pre-populated with a :ref:`template ` that you should complete. Provide an informative synopsis of your contribution, then mark appropriate checklist items by placing an "X" between their square brackets. You may tidy up the description by removing boilerplate text and non-selected checklist items. View :ref:`useful PR template guidance ` and information on :ref:`best practices ` for completing each section below. +* **Create draft PR.** Use the pull-down arrow on the green button below the description to initially create a :github-docs:`draft pull request`. + + * Once your draft PR is open, visit its *Files changed* tab and add comments to any lines of code where you think reviewers will benefit from more explanation. Try to save time by proactively answering questions you suspect reviewers will ask. + +* **Open PR.** Once your draft PR is marked up with your comments and ready for review, return to the *Conversation* tab and click the *Ready for review* button. + + * A default set of reviewers will automatically be added to your PR. You may add or request others, if appropriate. Pull requests will be reviewed and approved by at least two code reviewers, at least one of whom must be a code manager. Reviewers may make comments, ask questions, or request changes on your PR. Respond to these as needed, making commits in your clone and pushing to your fork/branch. Your PR will automatically be updated when commits are pushed to its source branch in your fork, so reviewers will immediately see your updates. When a PR has met the contribution and testing requirements and has been approved by two code reviewers, a code manager will merge the PR. + +.. _pr-template: + +PR Template +------------ + +Here is the template that is provided when developers click "Create pull request": + +.. code-block:: console + + - Update develop to head at ufs-community + + - Use this template to give a detailed message describing the change you want to make to the code. + + - You may delete any sections labeled "optional" and any instructions within . + + - If you are unclear on what should be written here, see https://github.com/wrf-model/WRF/wiki/Making-a-good-pull-request-message for some guidance and review the Code Contributor's Guide at https://github.com/ufs-community/ufs-srweather-app/wiki/Code-Manager's-Guide. + + - Code reviewers will assess the PR based on the criteria laid out in the Code Reviewer's Guide (https://github.com/ufs-community/ufs-srweather-app/wiki/Code-Manager's-Guide). + + - The title of this pull request should be a brief summary (ideally less than 100 characters) of the changes included in this PR. Please also include the branch to which this PR is being issued (e.g., "[develop]: Updated UFS_UTILS hash"). 
+ + - Use the "Preview" tab to see what your PR will look like when you hit "Create pull request" + + + # --- Delete this line and those above before hitting "Create pull request" --- + + ## DESCRIPTION OF CHANGES: + + + ### Type of change + + - [ ] Bug fix (non-breaking change which fixes an issue) + - [ ] New feature (non-breaking change which adds functionality) + - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) + - [ ] This change requires a documentation update + + ## TESTS CONDUCTED: + + + + - [ ] derecho.intel + - [ ] gaea.intel + - [ ] hera.gnu + - [ ] hera.intel + - [ ] hercules.intel + - [ ] jet.intel + - [ ] orion.intel + - [ ] wcoss2.intel + - [ ] NOAA Cloud (indicate which platform) + - [ ] Jenkins + - [ ] fundamental test suite + - [ ] comprehensive tests (specify *which* if a subset was used) + + ## DEPENDENCIES: + + + ## DOCUMENTATION: + + + ## ISSUE: + + + ## CHECKLIST + + - [ ] My code follows the style guidelines in the Contributor's Guide + - [ ] I have performed a self-review of my own code using the Code Reviewer's Guide + - [ ] I have commented my code, particularly in hard-to-understand areas + - [ ] My changes need updates to the documentation. I have made corresponding changes to the documentation + - [ ] My changes do not require updates to the documentation (explain). + - [ ] My changes generate no new warnings + - [ ] New and existing tests pass with my changes + - [ ] Any dependent changes have been merged and published + + ## LABELS (optional): + + A Code Manager needs to add the following labels to this PR: + - [ ] Work In Progress + - [ ] bug + - [ ] enhancement + - [ ] documentation + - [ ] release + - [ ] high priority + - [ ] run_ci + - [ ] run_we2e_fundamental_tests + - [ ] run_we2e_comprehensive_tests + - [ ] Needs Cheyenne test + - [ ] Needs Jet test + - [ ] Needs Hera test + - [ ] Needs Orion test + - [ ] help wanted + + ## CONTRIBUTORS (optional): + + +.. _pr-template-guidance: + +PR Template Guidance +--------------------- + +**TITLE:** Titles should start with the branch name in brackets and should give code reviewers a clear idea of what the change will do in approximately 5-10 words. Some good examples: + + * [develop] Make thompson_mynn_lam3km ccpp suite available + * [release/public-v2] Add a build_linux_compiler modulefile + * [develop] Fix module loads on Hera + * [develop] Add support for Rocoto with generic LINUX platform + +All of the above examples concisely describe the changes contained in the pull request. The title will not get cut off in emails and web pages. In contrast, here are some made-up (but plausible) examples of BAD pull request titles: + + * Bug fixes (Bug fixes on what part of the code?) + * Changes to surface scheme (What kind of changes? Which surface scheme?) + +**DESCRIPTION OF CHANGES:** The first line of the description should be a single-line "purpose" for this change. Note the type of change (i.e., bug fix, feature/enhancement, text-only). Summarize the problem, proposed solution, and required changes. If this is an enhancement or new feature, describe why the change is important. + +**DOCUMENTATION:** Developers should include documentation on new capabilities and enhancements by updating the appropriate RST documentation files in their fork prior to opening the PR. These documentation updates should be noted in the "Documentation" section of the PR message. If necessary, contributors may submit the RST documentation in a subsequent PR. 
In these cases, the developers should include any existing documentation in the "Documentation" section of the initial PR message or as a file attachment to the PR. Then, the contributor should open an issue reflecting the need for official RST documentation updates and include the issue number and explanation in the "Documentation" section of the initial PR template. + +.. _tips-best-practices: + +Tips, Best Practices, and Protocols to Follow When Issuing a PR +----------------------------------------------------------------- + +* **Label PR status appropriately.** If the PR is not completely ready to be merged, please add a "Work In Progress" label. Urgent PRs should be marked "high priority." All PRs should have a type label (e.g., "bug," "enhancement"). Labels can be added on the right-hand side of a submitted PR request by clicking on the gear icon beside "Labels" (below the list of reviewers). If users do not have the permissions to add a label to their PR, they should request in their PR description that a code manager add the appropriate labels. +* **Indicate urgency.** If a PR is particularly urgent, this information should be provided in the PR "Description" section, and multiple code management team members should be tagged to draw attention to this PR. After submitting the PR, a "high priority" label should be added to it. +* **Indicate the scope of the PR.** If the PR is extremely minor (e.g., change to the README file), indicate this in the PR message. If it is an extensive PR, the developer should test it on as many platforms as possible and stress the necessity that it be tested on systems for which they do not have access. +* **Clarify in the PR message where the code has been tested.** At a minimum, code should be tested on the platform where code modification has taken place. It should also be tested on machines where code modifications will impact results. If the developer does not have access to these platforms, this should be noted in the PR. +* **Follow separation of concerns.** For example, module loads are only handled in the appropriate modulefiles, Rocoto always sets the work directory, j-jobs make the work directory, and ex-scripts require the work directory to exist. +* **Target subject matter experts (SMEs) among the code management team.** When possible, tag team members who are familiar with the modifications made in the PR so that the code management team can provide effective and streamlined PR reviews and approvals. Developers can tag SMEs by selecting the gear icon next to "Assignees" (under the Reviewers list) and adding the appropriate names. +* **Schedule a live code review** if the PR is exceptionally complex in order to brief members of the code management team on the PR either in-person or through a teleconference. Developers should indicate in the PR message that they are interested in a live code review if they believe that it would be beneficial. + +Merging +======== + +Your PR is ready to merge when: + +#. It has been approved by a required number of ``ufs-srweather-app`` reviewers, including at least one code manager. +#. All conversations have been marked as resolved. +#. All required checks have passed. + +These criteria and their current statuses are detailed in a section at the bottom of your PR's *Conversation* tab. Checks take some time to run, so please be patient. + +In general, the lead code manager will merge the PR when ready. 
Developers with write permissions should not merge their code themselves unless instructed otherwise by the lead code manager. + +Need Help? +=========== + +See the :ref:`User Support ` section for an overview of user support options. For assistance directly related to a PR, please use comments in the *Conversation* tab of your PR to ask for help with any difficulties you encounter! diff --git a/doc/ContribGuide/documentation.rst b/doc/ContribGuide/documentation.rst new file mode 100644 index 0000000000..9e0bad6bda --- /dev/null +++ b/doc/ContribGuide/documentation.rst @@ -0,0 +1,72 @@ +.. _doc-guidelines: + +Documentation +============= + +.. note:: + + Thank you to the Unified Workflow (UW) team for allowing us to adapt their documentation guidance for use in the SRW App. The original can be viewed in the `uwtools` :uw:`documentation `. + + +Locally Building and Previewing Documentation +--------------------------------------------- + +To locally build the docs: + +#. Install ``sphinx``, ``sphinx-rtd-theme``, and ``sphinxcontrib-bibtex`` on your system if they are not already installed. +#. From the root of your clone: ``cd doc`` +#. Build the docs: ``make doc`` + +The ``make doc`` command will build the documentation under ``doc/build/html``, after which you can preview them in your web browser at the URL: + +.. code-block:: text + + file:///doc/build/html/index.html + +Rerun ``make doc`` and refresh your browser after making and saving changes. + +Viewing Online Documentation +---------------------------- + +Online documentation generation and hosting for the SRW App is provided by :rtd:`Read the Docs<>`. The green *View Docs* button near the upper right of that page links to the official docs for the project. When viewing the documentation, the version selector at the bottom of the navigation column on the left can be used to switch between the latest development code (``develop``), the latest released version (``latest``), and any previously released version. + +Docs are also built and temporarily published when Pull Requests (PRs) targeting the ``develop`` branch are opened. Visit the :rtd:`Builds page` to see recent builds, including those made for PRs. Click a PR-related build marked *Passed*, then the small *View docs* link (**not** the large green *View Docs* button) to see the docs built specifically for that PR. If your PR includes documentation updates, it may be helpful to include the URL of this build in your PR's description so that reviewers can see the rendered HTML docs and not just the modified ``.rst`` files. Note that if commits are pushed to the PR's source branch, Read the Docs will rebuild the PR docs. See the checks section near the bottom of a PR for current status and for another link to the PR docs via the *Details* link. + +.. COMMENT: Technically, docs are built when any PR is opened, regardless of branch. Look into changing. + +Documentation Guidelines +------------------------ + +Please follow these guidelines when contributing to the documentation: + +* Keep formatting consistent across pages. Update all pages when better ideas are discovered. Otherwise, follow the conventions established in existing content. +* Ensure that the ``make doc`` command completes with no errors or warnings. +* If the link-check portion of ``make doc`` reports that a URL is ``permanently`` redirected, update the link in the docs to use the new URL. Non-permanent redirects can be left as-is. +* Do not manually wrap lines in the ``.rst`` files. 
Insert newlines only as needed to achieve correctly formatted HTML, and let HTML wrap long lines and/or provide a scrollbar.
+* Use one blank line between documentation elements (headers, paragraphs, code blocks, etc.) unless additional lines are necessary to achieve correctly formatted HTML.
+* Remove all trailing whitespace.
+* In general, avoid pronouns like "we" and "you". (Using "we" may be appropriate when synonymous with "The SRW Code Management Team", "The UFS Community", etc., when the context is clear.) Prefer direct, factual statements about what the code does, requires, etc.
+* Use the `Oxford Comma `__.
+* Follow the :rst:`RST Sections` guidelines, underlining section headings with = characters, subsections with - characters, and subsubsections with ^ characters. If a further level of refinement is needed, use " to underline paragraph headers.
+* In [[sub]sub]section titles, capitalize all "principal" words. In practice this usually means all words but articles (a, an, the), logicals (and, etc.), and prepositions (for, of, etc.). Always fully capitalize acronyms (e.g., YAML).
+* Never capitalize proper names when their owners do not (e.g., write `"pandas" `__, not "Pandas", even at the start of a sentence) or when referring to a software artifact (e.g., write ``numpy`` when referring to the library, and "NumPy" when referring to the project).
+* When referring to YAML constructs, ``block`` refers to an entry whose value is a nested collection of key/value pairs, while ``entry`` is a single key/value pair.
+* When using the ``.. code-block::`` directive, align the actual code with the word ``code``. Also, when ``.. code-block::`` directives appear in bulleted or numbered lists, align them with the text following the space to the right of the bullet/number. Include a blank line prior to the code content. For example:
+
+  .. code-block:: text
+
+     * Lorem ipsum
+
+       .. code-block:: python
+
+          n = 88
+
+  or
+
+  .. code-block:: text
+
+     #. Lorem ipsum
+
+        .. code-block:: python
+
+           n = 88
\ No newline at end of file
diff --git a/doc/ContribGuide/git-submodules.rst b/doc/ContribGuide/git-submodules.rst
new file mode 100644
index 0000000000..42e7616cfe
--- /dev/null
+++ b/doc/ContribGuide/git-submodules.rst
@@ -0,0 +1,225 @@
+============================
+Working with Git Submodules
+============================
+
+.. note::
+
+   Thank you to Janet Derrico (@jderrico-noaa) [#f1]_ for authoring the summary of Git submodules on which this chapter is based. [#f2]_ It has been adapted slightly for use in the SRW App.
+
+What Are Git Submodules?
+=========================
+
+Git submodules are pointers to other Git repositories. They enable developers to include external repositories as a subdirectory within their main project. This is particularly useful when a project depends on external libraries or components that are developed and maintained in separate repositories.
+
+Key Benefits
+=============
+
+* **Version Control:** Submodules link to specific commits in external repositories, ensuring consistency and predictability. Developers can control exactly which version of an external repository their project depends on.
+* **Separate Development:** Changes to submodules are tracked separately from the main repository, allowing for independent development of external dependencies.
+* **Collaborative Workflows:** Multiple teams can work on different parts of a larger project simultaneously without interference, each with its own repository (e.g., changes to ``ccpp-physics`` can be developed at the same time as changes to ``ufs-weather-model``).
+
+How Submodules Are Linked
+==========================
+
+Git knows which submodules to check out based on two key pieces of information: the submodule pointer, and the information on where to find that pointer. The pointer is a commit reference---when you add a submodule to your repository, Git doesn't just store the URL; it also records a specific commit hash from that submodule. The commit hash is what Git uses to know which exact state of the submodule to check out. These commit references are stored in the main repository and are updated whenever a change is committed in the submodule. When you run ``git submodule update``, Git checks out the commit of each submodule according to what is recorded in the main repository. The ``.gitmodules`` file tracks where to find this information, storing the submodule's path within your repository and its corresponding URL.
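+
+For illustration, a ``.gitmodules`` entry stores only this path/URL pair; the pinned commit hash itself lives in the supermodule's Git history, not in this file. A hypothetical entry might look like:
+
+.. code-block:: text
+
+   [submodule "FV3"]
+      path = FV3
+      url = https://github.com/NOAA-EMC/fv3atm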
+
+If you commit a hash in a submodule but push it to a different fork, Git will still record the new submodule hash in the supermodule, which will result in a Git error when trying to recursively check out the supermodule (the recorded commit cannot be found at the recorded URL).
+
+Adding a Submodule
+===================
+
+You can add a submodule to your repository using ``git submodule add <repository-url> <path>``. This clones the external repository to the specified path and adds a new entry in a special file named ``.gitmodules``.
+
+Cloning a Repository with Submodules
+=====================================
+
+When cloning a repository that has submodules, use ``git clone --recursive`` to ensure that all submodules are also cloned.
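+
+For example, to clone the UFS Weather Model and all of its submodules in one step (illustrative):
+
+.. code-block:: console
+
+   git clone --recursive https://github.com/ufs-community/ufs-weather-model
+
+If a repository was already cloned without ``--recursive``, running ``git submodule update --init --recursive`` inside the clone retrieves the submodules after the fact.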
+
+Updating a Submodule
+======================
+
+To update a submodule, navigate into the submodule directory, check out the desired commit or branch, and then go back to the main repository to commit this change. Here is an example for making a change to ``ccpp-physics``, ``fv3atm``, and ``ufs-weather-model``. Since ``ccpp-physics`` is a submodule of ``fv3atm`` and ``ufs-weather-model``, a change to ``ccpp-physics`` requires PRs to all three repositories.
+This method requires two remotes on your local workspace: the authoritative repository (e.g., ``ufs-community/ufs-weather-model``) and the personal fork you push to (e.g., ``jderrico-noaa/ufs-weather-model``). The steps involved are:
+
+#. Clone locally
+#. Create your working branches
+#. Commit your changes
+#. Push your working branches to your personal fork
+#. Submit PRs from personal fork to authoritative
+
+Cloning the Authoritative Repository and Adding Your Personal Fork
+--------------------------------------------------------------------
+
+Clone the authoritative repository to your local workspace:
+
+.. code-block:: console
+
+   git clone --recursive -b branch-name https://github.com/ufs-community/ufs-weather-model
+   cd ufs-weather-model
+
+where ``branch-name`` is the name of the branch you want to clone (usually ``develop``).
+
+Adding Your Personal Fork as a Remote Repository
+--------------------------------------------------
+
+.. code-block:: console
+
+   git remote add my-fork https://github.com/<your-username>/ufs-weather-model
+
+where ``my-fork`` is the name you give the remote for your fork and ``https://github.com/<your-username>/ufs-weather-model`` is your fork's URL. You can name the remote whatever you want (e.g., janet) as long as you can distinguish it from the authoritative repository.
+
+Run:
+
+.. code-block:: console
+
+   git remote -v
+
+to show the remote repositories that have been added to your local copy of ``ufs-weather-model``; it should show ``origin`` (the authoritative ufs-community repo) and ``my-fork`` (the personal fork that you push changes to).
+The local repository for ``ufs-weather-model`` has been created. This process is repeated for the other submodules (``fv3atm`` and ``ccpp-physics``, where the code will be modified):
+
+.. code-block:: console
+
+   cd FV3
+   git remote add my-fork https://github.com/<your-username>/fv3atm
+   cd ccpp/physics
+   git remote add my-fork https://github.com/<your-username>/ccpp-physics
+
+Create Working Branches
+------------------------
+
+The next step is to create working branches that will hold your changes until they are merged. It is good practice to check out the main branch (e.g., ``develop``) first to ensure that you are working with the latest updates, and then create your working branch; you will do this at every level. Navigate from ``ccpp/physics`` back up to ``ufs-weather-model`` and create a new branch to hold your changes:
+
+.. code-block:: console
+
+   cd ../../..
+   git checkout -b working_branch
+
+This command creates a new branch named ``working_branch``; in practice, the branch name should be more descriptive and reflect the development it will be holding. Follow the same process for the Git submodules you will be working in:
+
+.. code-block:: console
+
+   cd FV3
+   git checkout develop
+   git checkout -b working_branch
+   cd ccpp/physics
+   git checkout ufs/dev
+   git checkout -b working_branch
+
+Commit Changes and Push Working Branches
+------------------------------------------
+
+As you make changes to the code, you should commit often. This ensures that all of your development is tracked (so you don't lose anything) and makes it easier to go back to a working version if one of your changes breaks things (it happens!). Commit messages should be descriptive of the changes they contain.
+
+To push your working branches to your fork from the top down, navigate to the ``ufs-weather-model`` directory. Then run:
+
+.. code-block:: console
+
+   git push -u my-fork working_branch
+
+The ``-u`` flag here tells Git to set ``my-fork/working_branch`` as the default remote branch for ``working_branch``. After executing this command, you can simply use ``git push`` or ``git pull`` while on ``working_branch``, and Git will automatically know to push or pull from ``my-fork/working_branch``.
+
+Continue this process with the other submodule repositories:
+
+.. code-block:: console
+
+   cd FV3
+   git push -u my-fork working_branch
+   cd ccpp/physics
+   git push -u my-fork working_branch
+
+All working changes are now in your personal fork.
+
+Submitting PRs
+---------------
+
+When working with Git submodules, developers must submit individual pull requests to each repository where changes were made and link them to each other. In this case, developers would submit PRs to ``ufs-weather-model``, ``fv3atm``, and ``ccpp-physics``. There are several steps to this process: opening the PR, updating the submodules, and creating new submodule pointers. Each authoritative repository should have its own PR template that includes space to link to the URLs of related PRs. If for some reason this is not the case, developers should link to the related PRs in the "Description" section of their PR.
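+
+Before opening these PRs, it can help to confirm exactly which submodule commits your working branches reference. One illustrative check, run from the top of ``ufs-weather-model``:
+
+.. code-block:: console
+
+   # List the currently checked-out commit of every submodule, including nested ones
+   git submodule status --recursive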
+
+Updating the Submodules
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+When changes are made to the authoritative repositories while you are developing or while your PR is open, you need to update the PR to include those updates. From your local workspace, navigate to ``ufs-weather-model`` and run:
+
+.. code-block:: console
+
+   git checkout develop
+   git pull origin develop
+   git checkout working_branch
+   git merge develop
+   git push -u my-fork working_branch
+
+This will check out the ``develop`` branch, retrieve the latest updates, then check out ``working_branch`` and merge the latest changes from ``develop`` into it. After pushing the changes on ``working_branch`` to your personal fork, your PR will update automatically. This process must then be repeated for the other components (e.g., ``fv3atm`` and ``ccpp-physics``). It is important to check that you are merging the correct branch---for example, the main development branch in ``ufs-community/ccpp-physics`` is ``ufs/dev``, so you would check out/pull ``ufs/dev`` instead.
+
+.. note::
+
+   If you have already pushed ``working_branch`` to ``my-fork`` using the ``-u`` flag, you can omit the flag and fork specification, but it doesn't hurt to use them.
+
+Add Submodule Pointers
+^^^^^^^^^^^^^^^^^^^^^^^
+
+To create submodule pointers, developers will navigate to the lowest submodule directory (rather than going from the top down) to create pointers linking the submodule to the supermodule. In this example, we are using *ufs-weather-model → fv3atm → ccpp-physics*, so developers would start by navigating to ``ccpp-physics``. Once your PR to ``ccpp-physics`` is merged, you then need to update your PRs to ``fv3atm`` and ``ufs-weather-model`` so that they point to the updated ``ccpp-physics`` submodule.
+
+First, update the local copy of ``ccpp-physics`` with what was merged to the authoritative repository (i.e., your changes):
+
+.. code-block:: console
+
+   git checkout ufs/dev
+   git pull origin ufs/dev
+
+Then navigate to ``fv3atm``:
+
+.. code-block:: console
+
+   cd ../..
+
+If you were working with other submodules, you would navigate to the submodule above the lowest one here. Then create the submodule pointer, commit the change, and push it to your fork of ``fv3atm``:
+
+.. code-block:: console
+
+   git checkout working_branch
+   git add ccpp/physics
+   git commit -m "update submodule pointer for ccpp-physics"
+   git push -u my-fork working_branch
+
+Once again, pushing to your personal fork will automatically update the PR that includes ``working_branch``.
+
+The ``fv3atm`` code managers will then merge your ``fv3atm`` PR, at which point only the ``ufs-srweather-model`` PR will require a submodule pointer update. From your local workspace, navigate to the ``FV3`` directory (``ufs-weather-model/FV3``) and update the local copy of ``fv3atm`` with what was just merged into the authoritative repository:
+
+.. code-block:: console
+
+   git checkout develop
+   git pull origin develop
+
+Then, navigate up to the ``ufs-weather-model`` directory, check out the working branch, and add the submodule pointer for ``fv3atm``. Commit and push the changes to your personal fork:
+
+.. code-block:: console
+
+   cd ..
+   git checkout working_branch
+   git add FV3
+   git commit -m "update submodule pointer for fv3atm"
+   git push -u my-fork working_branch
+
+The UFS code managers will then test and merge the ``ufs-weather-model`` PR.
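+
+When staging a pointer update, it can be worth double-checking that only the intended submodule hash changed before pushing. One illustrative check:
+
+.. code-block:: console
+
+   # Summarize staged submodule pointer changes as old..new commit ranges
+   git diff --cached --submodule=log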
+
+Switching Branches With Submodules
+====================================
+
+If you are working off a branch that has different versions (or commit references/pointers) of submodules, it is important to synchronize the submodules correctly. From the supermodule, you would switch to your desired branch and then update the submodules. For example, if you want to work on a different branch of the ``ufs-weather-model`` repository:
+
+.. code-block:: console
+
+   git checkout desired_branch
+   git submodule update --init --recursive
+
+Here, ``--init`` initializes any submodules that have not yet been initialized, while ``--recursive`` ensures that all nested submodules (e.g., ``fv3atm``) are updated. If you know there have been upstream changes to a submodule, and you want to incorporate these latest changes, you would go into each submodule directory and pull the changes:
+
+.. code-block:: console
+
+   cd path/to/submodule
+   git pull origin
+
+When working with submodules, it is best practice to always run ``git submodule update --init --recursive`` after switching branches. Changes to submodules need to be committed and pushed separately within their respective repositories (see sections above).
+
+.. [#f1] of NOAA Global Systems Laboratory (GSL) and Cooperative Institute for Research in Environmental Sciences (CIRES)
+.. [#f2] with the assistance of Grant Firl, Joseph Olson, and ChatGPT
\ No newline at end of file
diff --git a/doc/ContribGuide/index.rst b/doc/ContribGuide/index.rst
new file mode 100644
index 0000000000..c4adfc997d
--- /dev/null
+++ b/doc/ContribGuide/index.rst
@@ -0,0 +1,13 @@
+Contributor's Guide
+======================
+
+.. toctree::
+   :maxdepth: 3
+
+   introduction
+   contributing
+   code-configuration-standards
+   testing
+   git-submodules
+   documentation
+
diff --git a/doc/ContribGuide/introduction.rst b/doc/ContribGuide/introduction.rst
new file mode 100644
index 0000000000..687dc2ea25
--- /dev/null
+++ b/doc/ContribGuide/introduction.rst
@@ -0,0 +1,31 @@
+=================
+Introduction
+=================
+
+Background
+============
+
+Authoritative Branch
+----------------------
+
+The ``ufs-srweather-app`` repository maintains a main branch for development called ``develop``. The HEAD of ``develop`` reflects the latest development changes. It points to regularly updated hashes for individual subcomponents. Pull requests (PRs) are typically merged to ``develop``.
+
+The ``develop`` branch is protected by the code management team:
+
+   #. Pull requests for this branch require approval by at least two code reviewers.
+   #. A code manager should perform at least one of the reviews and the merge, but other contributors are welcome to provide comments/suggestions.
+
+.. _rcm-team:
+
+Repository Code Management Team
+---------------------------------
+
+Scientists and engineers from multiple labs and organizations have volunteered to review pull requests for the ``develop`` branch:
+
+.. csv-table::
+   :file: ../tables/code-managers.csv
+   :widths: auto
+   :delim: ;
+   :header-rows: 1
+
+
diff --git a/doc/ContribGuide/testing.rst b/doc/ContribGuide/testing.rst
new file mode 100644
index 0000000000..b296a3f90a
--- /dev/null
+++ b/doc/ContribGuide/testing.rst
@@ -0,0 +1,70 @@
+.. _pr-testing:
+
+========
+Testing
+========
+
+The ``ufs-srweather-app`` repository uses the established workflow end-to-end (WE2E) testing framework (see :ref:`WE2E tests `) to implement two tiers of testing: fundamental and comprehensive.
*Fundamental testing* consists of a lightweight set of tests that can be automated and run regularly on each :srw-wiki:`Level 1 ` platform. These tests verify that there are no major, obvious faults in the underlying code when running common combinations of grids, input data, and physics suites. *Comprehensive testing* includes the entire set of WE2E tests and covers a broader range of capabilities, configurations, and components. Eventually, new categories of tests will be added, including regression tests and unit tests. + +Before opening a PR, a minimum set of tests should be run: + + * Developers must run the fundamental test suite manually on at least one supported platform and report on the outcome in the PR template. Developers should test code modifications on as many platforms as possible. + + * To run the fundamental tests manually, run the following command from the ``tests/WE2E`` directory: + + .. code-block:: console + + ./run_WE2E_tests.py -t=fundamental -m=your_machine -a=your_account + + where ``your_machine`` is the Tier-1 machine you are running the tests on, and ``your_account`` is the account you charge your computational resources to. Refer to the :ref:`WE2E Tests ` chapter of the User's Guide for more detail on how to run SRW App tests. + + * Developers will not be required to run tests on *all* supported platforms, but if a failure is pointed out by another reviewer (or by automated testing), then it is expected that the developer will work with reviewers and code managers to ensure that the problem is resolved prior to merging. + + * If the PR impacts functionality contained within comprehensive WE2E tests not included in the fundamental test suite, the developer must run those tests on the PR. + * Any new functionality must be tested explicitly, and any new tests should be described in detail in the PR message. Depending on the impact of this functionality, new tests should be added to the suite of comprehensive WE2E tests, followed by a discussion with code managers on whether they should also be included as fundamental tests. + + * In some cases, it may be possible to modify a current test instead of creating a completely new test. Code developers introducing new capabilities should work with code managers to provide the proper configuration files, data, and other information necessary to create new tests for these capabilities. + + * When the above tests are complete and the PR has been approved by at least one code manager, a code manager will add the ``run_we2e_coverage_tests`` label to initiate fundamental testing on all Level 1 platforms via the Jenkins CI/CD pipeline. + +Testing on Jenkins +=================== + +`Jenkins `__ is an "open source automation server" that automates code testing. For the Jenkins automated testing labels, it should be noted that **ONLY** code managers should apply these labels and only after at least one code manager has given approval to the PR. The PR will not be merged until all Jenkins-based builds and testing have successfully passed. + +The following automated testing labels are available for the SRW App: + + * ``run_we2e_coverage_tests`` + * *Coming Soon:* ``run_we2e_comprehensive_tests`` + +Due to a security issue on Jenkins, where all Jenkins usernames are exposed, access to Jenkins logs through the Jenkins API has been disabled for the public. However, users can visit the `EPIC Health Dashboard `__ and click the *Jenkins Artifacts* tab to access the log files for their PR. 
On that page, users can identify their PR number, pull the ``we2e_test_logs-{machine}-{compiler}.tgz`` file (where ``{machine}`` is the Tier-1 platform that failed and ``{compiler}`` is the compiler used for the failed test), untar and ungzip the file, and look through the logs from the test that failed. + +Additionally, users can potentially access the directories where the Jenkins tests are run on the various machines so that they can view the tests, monitor progress, and investigate failures. The locations of the experiment directories on the various machines are as follows: + +.. list-table:: + :header-rows: 1 + + * - Tier-1 Platform + - Location of Jenkins experiment directories + * - Derecho + - /glade/derecho/scratch/epicufsrt/jenkins/workspace + * - Gaea + - /lustre/f2/dev/wpo/role.epic/jenkins/workspace/fs-srweather-app_pipeline_PR-#/gaea + * - Gaea C5 + - /lustre/f2/dev/wpo/role.epic/jenkins/workspace/fs-srweather-app_pipeline_PR-#/gaea-c5 + * - Hera (Intel) + - /scratch2/NAGAPE/epic/role.epic/jenkins/workspace/fs-srweather-app_pipeline_PR-#__2/hera + * - Hera (GNU) + - /scratch2/NAGAPE/epic/role.epic/jenkins/workspace/fs-srweather-app_pipeline_PR-#/hera + * - Hercules + - /work/noaa/epic/role-epic/jenkins/workspace/fs-srweather-app_pipeline_PR-#/hercules + * - Jet + - /lfs1/NAGAPE/epic/role.epic/jenkins/workspace/fs-srweather-app_pipeline_PR-#/jet + * - Orion + - /work/noaa/epic/role-epic/jenkins/workspace/fs-srweather-app_pipeline_PR-#/orion + +where ``#`` is the PR number. + +If the Jenkins tests fail, then the developer will need to make the necessary corrections to their PR. Unfortunately, removing and adding the label back will not kick off the Jenkins test again. Instead, the job will need to be manually re-run through Jenkins (by a member of the EPIC team). + + diff --git a/doc/INSTALL b/doc/INSTALL index e53044f6ad..53dc159bbd 100644 --- a/doc/INSTALL +++ b/doc/INSTALL @@ -12,7 +12,7 @@ git clone https://github.com/ufs-community/ufs-srweather-app.git cd ufs-srweather-app/ ./manage_externals/checkout_externals -# We can build ufs-sreweather-app binaries in two ways. +# We can build ufs-srweather-app binaries in two ways. # Method 1 # ======== diff --git a/doc/UsersGuide/Makefile b/doc/Makefile similarity index 84% rename from doc/UsersGuide/Makefile rename to doc/Makefile index 84c77bbfa2..c91f2f147b 100644 --- a/doc/UsersGuide/Makefile +++ b/doc/Makefile @@ -2,7 +2,7 @@ SPHINXOPTS = -a -n #-W SPHINXBUILD = sphinx-build -SOURCEDIR = source +SOURCEDIR = . BUILDDIR = build LINKCHECKDIR = $(BUILDDIR)/linkcheck @@ -12,12 +12,13 @@ LINKCHECKDIR = $(BUILDDIR)/linkcheck help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) -docs: +doc: + make clean $(MAKE) linkcheck $(MAKE) html linkcheck: - make clean && $(SPHINXBUILD) -b linkcheck $(SPHINXOPTS) $(SOURCEDIR) $(LINKCHECKDIR) + $(SPHINXBUILD) -b linkcheck $(SPHINXOPTS) $(SOURCEDIR) $(LINKCHECKDIR) # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). diff --git a/doc/UsersGuide/README b/doc/README similarity index 89% rename from doc/UsersGuide/README rename to doc/README index 0ad8948eda..017f865384 100644 --- a/doc/UsersGuide/README +++ b/doc/README @@ -20,10 +20,11 @@ Steps to build and use the Sphinx documentation tool: To build html: -$ cd ufs-srweather-app/docs/UsersGuide -$ make clean && sphinx-build -b html source build +$ cd ufs-srweather-app/doc +$ make clean && sphinx-build -b html . 
build The "make html" command can often be used in place of the previous command. +"make doc" will both build the html and run the linkchecker. Sphinx uses Latex to export the documentation as a PDF file. To build pdf: diff --git a/doc/UsersGuide/source/BackgroundInfo/CCPPUpdates.rst b/doc/UsersGuide/BackgroundInfo/CCPPUpdates.rst similarity index 100% rename from doc/UsersGuide/source/BackgroundInfo/CCPPUpdates.rst rename to doc/UsersGuide/BackgroundInfo/CCPPUpdates.rst diff --git a/doc/UsersGuide/source/BackgroundInfo/Components.rst b/doc/UsersGuide/BackgroundInfo/Components.rst similarity index 98% rename from doc/UsersGuide/source/BackgroundInfo/Components.rst rename to doc/UsersGuide/BackgroundInfo/Components.rst index d861304502..559576725d 100644 --- a/doc/UsersGuide/source/BackgroundInfo/Components.rst +++ b/doc/UsersGuide/BackgroundInfo/Components.rst @@ -22,7 +22,7 @@ UFS Preprocessing Utilities (UFS_UTILS) The SRW Application includes a number of pre-processing utilities (UFS_UTILS) that initialize and prepare the model. Since the SRW App provides forecast predictions over a limited area (rather than globally), these utilities generate a regional grid (``regional_esg_grid/make_hgrid``) along with :term:`orography` (``orog``) and surface climatology (``sfc_climo_gen``) files on that grid. Grids include a strip, or "halo," of six cells that surround the regional grid and feed in lateral boundary condition data. Since different grid and orography files require different numbers of :term:`halo` cells, additional utilities handle topography filtering and shave the number of halo points (based on downstream workflow component requirements). The pre-processing software :term:`chgres_cube` is used to convert the raw external model data into initial and lateral boundary condition files in :term:`netCDF` format. These are needed as input to the :term:`FV3` limited area model (:term:`LAM`). Additional information about the UFS pre-processing utilities can be found in the :doc:`UFS_UTILS Technical Documentation ` and in the `UFS_UTILS Scientific Documentation `__. -The SRW Application can be initialized from a range of operational initial condition files. It is possible to initialize the model from the Global Forecast System (:term:`GFS`), North American Mesoscale (:term:`NAM`) Forecast System, Rapid Refresh (:term:`RAP`), and High-Resolution Rapid Refresh (:term:`HRRR`) files in Gridded Binary v2 (:term:`GRIB2`) format. GFS files also come in :term:`NEMSIO` format for past dates. +The SRW Application can be initialized from a range of operational initial condition files. It is possible to initialize the model from the Global Forecast System (:term:`GFS`), North American Mesoscale (:term:`NAM`) Forecast System, Rapid Refresh (:term:`RAP`), High-Resolution Rapid Refresh (:term:`HRRR`), and Rapid Refresh Forecast System (:term:`RRFS`) files in Gridded Binary v2 (:term:`GRIB2`) format. GFS files also come in :term:`NEMSIO` format for past dates. .. WARNING:: For GFS data, dates prior to 1 January 2018 may work but are not guaranteed. Public archives of model data can be accessed through the `NOAA Operational Model Archive and Distribution System `__ (NOMADS). Raw external model data may be pre-staged on disk by the user. @@ -89,7 +89,7 @@ For more information on NEXUS, visit the GitHub repository at https://github.com Unified Workflow Tools ======================== -The Unified Workflow (UW) is a set of tools intended to unify the workflow for various UFS applications under one framework. 
The UW toolkit currently includes templater and configuration (config) tools, which have been incorporated into the SRW App workflow and will soon be incorporated into other UFS repositories. Additional tools are under development. More details about UW tools can be found in the `uwtools `__ GitHub repository and in the :doc:`UW Documentation `. +The Unified Workflow (UW) is a set of tools intended to unify the workflow for various UFS applications under one framework. The UW toolkit currently includes templater and configuration (config) tools, which have been incorporated into the SRW App workflow and will soon be incorporated into other UFS repositories. Additional tools are under development. More details about UW tools can be found in the `uwtools `__ GitHub repository and in the :uw:`UW Documentation <>`. Build System and Workflow ========================= diff --git a/doc/UsersGuide/source/BackgroundInfo/Introduction.rst b/doc/UsersGuide/BackgroundInfo/Introduction.rst similarity index 78% rename from doc/UsersGuide/source/BackgroundInfo/Introduction.rst rename to doc/UsersGuide/BackgroundInfo/Introduction.rst index 4c6379e295..f1a384e025 100644 --- a/doc/UsersGuide/source/BackgroundInfo/Introduction.rst +++ b/doc/UsersGuide/BackgroundInfo/Introduction.rst @@ -11,28 +11,30 @@ The UFS includes `multiple applications `__) and support for the ``RRFS_NA_13km`` predefined grid - * Addition of ``FV3_GFS_v17_p8`` physics suite (`PR #574 `__) + * Addition of the supported ``FV3_RAP`` physics suite (:srw-repo:`PR #811 `) and support for the ``RRFS_NA_13km`` predefined grid + * Addition of ``FV3_GFS_v17_p8`` physics suite (:srw-repo:`PR #574 `) * Updates to :term:`CCPP` that target the top of the ``main`` branch (which is ahead of CCPP v6.0.0). See :ref:`this page ` for a detailed summary of updates that came in ahead of the v2.2.0 release. 
- * Expansion of :srw-wiki:`Level 1 platforms ` to include Derecho, Hercules, and Gaea C5 (PRs `#894 `__, `#898 `__, `#911 `__) - * Transition to spack-stack modulefiles for most supported platforms to align with the UFS WM shift to spack-stack (PRs `#913 `__ and `#941 `__) - * Overhaul of the WE2E testing suite (see, e.g., PRs `#686 `__, `#732 `__, `#864 `__, `#871 `__) - * Improvements to the CI/CD automated testing pipeline (see, e.g., PRs `#707 `__ and `#847 `__) - * Incorporation of additional METplus verification capabilities (PRs `#552 `__, `#614 `__, `#757 `__, `#853 `__) - * Integration of the Unified Workflow's templater tool (`PR #793 `__) - * Ability to create a user-defined custom workflow (`PR #676 `__) - * Option to use a custom vertical coordinate file with different distribution of vertical layers (`PR #813 `__) and :ref:`documentation on how to use this feature ` (`PR #888 `__) - * Incorporation of plotting tasks into the workflow (PR `#482 `__); addition of ability to plot on both CONUS and smaller regional grid (`PR #560 `__) - * Addition of a sample verification case (`PR #500 `__) with :ref:`documentation ` - * A new :ref:`tutorial chapter ` in the documentation (`PR #584 `__) - * Incorporation of `UFS Case Studies `__ within the WE2E framework (PRs `#736 `__ and `#822 `__) - * Air Quality Modeling (AQM) capabilities (unsupported but available; see `PR #613 `__) + * Expansion of :srw-wiki:`Level 1 platforms ` to include Derecho, Hercules, and Gaea C5 (PRs :srw-repo:`#894 `, :srw-repo:`#898 `, :srw-repo:`#911 `) + * Transition to spack-stack modulefiles for most supported platforms to align with the UFS WM shift to spack-stack (PRs :srw-repo:`#913 ` and :srw-repo:`#941 `) + * Overhaul of the WE2E testing suite (see, e.g., PRs :srw-repo:`#686 `, :srw-repo:`#732 `, :srw-repo:`#864 `, :srw-repo:`#871 `) + * Improvements to the CI/CD automated testing pipeline (see, e.g., PRs :srw-repo:`#707 ` and :srw-repo:`#847 `) + * Incorporation of additional METplus verification capabilities (PRs :srw-repo:`#552 `, :srw-repo:`#614 `, :srw-repo:`#757 `, :srw-repo:`#853 `) + * Integration of the Unified Workflow's templater tool (:srw-repo:`PR #793 `) + * Ability to create a user-defined custom workflow (:srw-repo:`PR #676 `) + * Option to use a custom vertical coordinate file with different distribution of vertical layers (:srw-repo:`PR #813 `) and :ref:`documentation on how to use this feature ` (:srw-repo:`PR #888 `) + * Incorporation of plotting tasks into the workflow (PR :srw-repo:`#482 `); addition of ability to plot on both CONUS and smaller regional grid (:srw-repo:`PR #560 `) + * Addition of a sample verification case (:srw-repo:`PR #500 `) with :ref:`documentation ` + * A new :ref:`tutorial chapter ` in the documentation (:srw-repo:`PR #584 `) + * Incorporation of `UFS Case Studies `__ within the WE2E framework (PRs :srw-repo:`#736 ` and :srw-repo:`#822 `) + * Air Quality Modeling (AQM) capabilities (unsupported but available; see :srw-repo:`PR #613 `) * Miscellaneous documentation updates to reflect the changes above The SRW App |latestr| citation is as follows and should be used when presenting results based on research conducted with the App: UFS Development Team. (2023, Oct. 31). Unified Forecast System (UFS) Short-Range Weather (SRW) Application (Version v2.2.0). Zenodo. https://doi.org/10.5281/zenodo.10015544 +..
_ug-organization: + User's Guide Organization ============================ @@ -80,6 +82,7 @@ Reference Information * :numref:`Section %s: FAQ ` answers users' frequently asked questions. * :numref:`Section %s: Glossary ` defines important terms related to the SRW App. +.. _doc-conventions: SRW App Documentation Conventions =================================== @@ -96,6 +99,8 @@ Code that includes angle brackets (e.g., ``build__``) indica File or directory paths that begin with ``/path/to/`` should be replaced with the actual path on the user's system. For example, ``/path/to/modulefiles`` might be replaced by ``/Users/Jane.Smith/ufs-srweather-app/modulefiles``. +.. _component-docs: + Component Documentation ========================= @@ -142,8 +147,10 @@ A list of available component documentation is shown in :numref:`Table %s `__, since many "bugs" do not require a code change/fix --- instead, the user may be unfamiliar with the system and/or may have misunderstood some component of the system or the instructions, which is causing the problem. Asking for assistance in a `GitHub Discussion `__ post can help clarify whether there is a simple adjustment to fix the problem or whether there is a genuine bug in the code. Users are also encouraged to search `open issues `__ to see if their bug has already been identified. If there is a genuine bug, and there is no open issue to address it, users can report the bug by filing a `GitHub Issue `__. +If users (especially new users) believe they have identified a bug in the system, it is recommended that they first ask about the problem in :srw-repo:`GitHub Discussions `, since many "bugs" do not require a code change/fix --- instead, the user may be unfamiliar with the system and/or may have misunderstood some component of the system or the instructions, which is causing the problem. Asking for assistance in a :srw-repo:`GitHub Discussion ` post can help clarify whether there is a simple adjustment to fix the problem or whether there is a genuine bug in the code. Users are also encouraged to search :srw-repo:`open issues ` to see if their bug has already been identified. If there is a genuine bug, and there is no open issue to address it, users can report the bug by filing a :srw-repo:`GitHub Issue `. Feature Requests and Enhancements ----------------------------------- @@ -178,6 +185,8 @@ utilities, model code, and infrastructure. As described above, users can post is Contributions to the `ufs-srweather-app `__ repository should follow the guidelines contained in the :srw-wiki:`SRW App Contributor's Guide `. Additionally, users can file issues in component repositories for contributions that directly concern those repositories. For code to be accepted into a component repository, users must follow the code management rules of that component's authoritative repository. These rules are usually outlined in the component's User's Guide (see :numref:`Table %s `) or GitHub wiki for each respective repository (see :numref:`Table %s `). +.. _future-direction: + Future Direction ================= @@ -189,4 +198,4 @@ Users can expect to see incremental improvements and additional capabilities in * Incorporation of additional `Unified Workflow `__ tools. -.. bibliography:: ../references.bib +.. 
bibliography:: ../../references.bib diff --git a/doc/UsersGuide/source/BackgroundInfo/TechnicalOverview.rst b/doc/UsersGuide/BackgroundInfo/TechnicalOverview.rst similarity index 94% rename from doc/UsersGuide/source/BackgroundInfo/TechnicalOverview.rst rename to doc/UsersGuide/BackgroundInfo/TechnicalOverview.rst index b2a1819670..52365a86e5 100644 --- a/doc/UsersGuide/source/BackgroundInfo/TechnicalOverview.rst +++ b/doc/UsersGuide/BackgroundInfo/TechnicalOverview.rst @@ -138,7 +138,7 @@ The UFS Weather Model contains a number of sub-repositories, which are documente Repository Structure ---------------------- -The ``ufs-srweather-app`` :term:`umbrella repository` is an NCO-compliant repository. Its structure follows the standards laid out in :term:`NCEP` Central Operations (NCO) WCOSS `Implementation Standards `__. This structure is implemented using the ``local_path`` settings contained within the ``Externals.cfg`` file. After ``manage_externals/checkout_externals`` is run (see :numref:`Section %s `), the specific GitHub repositories described in :numref:`Table %s ` are cloned into the target subdirectories shown below. Directories that will be created as part of the build process appear in parentheses and will not be visible until after the build is complete. Some directories have been removed for brevity. +The ``ufs-srweather-app`` :term:`umbrella repository` is an NCO-compliant repository. Its structure follows the standards laid out in :term:`NCEP` Central Operations (NCO) WCOSS :nco:`Implementation Standards `. This structure is implemented using the ``local_path`` settings contained within the ``Externals.cfg`` file. After ``manage_externals/checkout_externals`` is run (see :numref:`Section %s `), the specific GitHub repositories described in :numref:`Table %s ` are cloned into the target subdirectories shown below. Directories that will be created as part of the build process appear in parentheses and will not be visible until after the build is complete. Some directories have been removed for brevity. .. code-block:: console @@ -193,7 +193,7 @@ The ``ufs-srweather-app`` :term:`umbrella repository` is an NCO-compliant reposi SRW App SubDirectories ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -:numref:`Table %s ` describes the contents of the most important SRW App subdirectories. :numref:`Table %s ` provides a more comprehensive explanation of the ``ufs-srweather-app`` files and subdirectories. Users can reference the `NCO Implementation Standards `__ (p. 19) for additional details on repository structure in NCO-compliant repositories. +:numref:`Table %s ` describes the contents of the most important SRW App subdirectories. :numref:`Table %s ` provides a more comprehensive explanation of the ``ufs-srweather-app`` files and subdirectories. Users can reference the :nco:`NCO Implementation Standards ` (p. 19) for additional details on repository structure in NCO-compliant repositories. .. 
_Subdirectories: diff --git a/doc/UsersGuide/source/BackgroundInfo/index.rst b/doc/UsersGuide/BackgroundInfo/index.rst similarity index 100% rename from doc/UsersGuide/source/BackgroundInfo/index.rst rename to doc/UsersGuide/BackgroundInfo/index.rst diff --git a/doc/UsersGuide/source/BuildingRunningTesting/AQM.rst b/doc/UsersGuide/BuildingRunningTesting/AQM.rst similarity index 99% rename from doc/UsersGuide/source/BuildingRunningTesting/AQM.rst rename to doc/UsersGuide/BuildingRunningTesting/AQM.rst index 6d2ae0f193..7186de6618 100644 --- a/doc/UsersGuide/source/BuildingRunningTesting/AQM.rst +++ b/doc/UsersGuide/BuildingRunningTesting/AQM.rst @@ -123,7 +123,7 @@ The community AQM configuration assumes that users have :term:`HPSS` access and USE_USER_STAGED_EXTRN_FILES: true EXTRN_MDL_SOURCE_BASEDIR_LBCS: /path/to/data -On Level 1 systems, users can find :term:`ICs/LBCs` in the usual :ref:`input data locations ` under ``FV3GFS/netcdf/2023021700`` and ``FV3GFS/netcdf/2023021706``. Users can also download the data required for the community experiment from the `UFS SRW App Data Bucket `__. +On Level 1 systems, users can find :term:`ICs/LBCs` in the usual :ref:`input data locations ` under ``FV3GFS/netcdf/2023021700`` and ``FV3GFS/netcdf/2023021706``. Users can also download the data required for the community experiment from the `UFS SRW App Data Bucket `__. Users may also wish to change :term:`cron`-related parameters in ``config.yaml``. In the ``config.aqm.community.yaml`` file, which was copied into ``config.yaml``, cron is used for automatic submission and resubmission of the workflow: diff --git a/doc/UsersGuide/source/BuildingRunningTesting/BuildSRW.rst b/doc/UsersGuide/BuildingRunningTesting/BuildSRW.rst similarity index 100% rename from doc/UsersGuide/source/BuildingRunningTesting/BuildSRW.rst rename to doc/UsersGuide/BuildingRunningTesting/BuildSRW.rst diff --git a/doc/UsersGuide/source/BuildingRunningTesting/ContainerQuickstart.rst b/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst similarity index 98% rename from doc/UsersGuide/source/BuildingRunningTesting/ContainerQuickstart.rst rename to doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst index 9e4f58f0bd..d9dd1a0afc 100644 --- a/doc/UsersGuide/source/BuildingRunningTesting/ContainerQuickstart.rst +++ b/doc/UsersGuide/BuildingRunningTesting/ContainerQuickstart.rst @@ -188,8 +188,8 @@ The SRW App requires input files to run. These include static datasets, initial .. code-block:: console - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/fix_data.tgz - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/gst_data.tgz + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/fix_data.tgz + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/gst_data.tgz tar -xzf fix_data.tgz tar -xzf gst_data.tgz @@ -439,4 +439,4 @@ If users have the PBS resource manager installed on their system, the allocation For more information on the ``qsub`` command options, see the `PBS Manual §2.59.3 `__, (p. 1416). -These commands should output a hostname. Users can then run ``ssh ``. After "ssh-ing" to the compute node, they can run the container from that node. To run larger experiments, it may be necessary to allocate multiple compute nodes. \ No newline at end of file +These commands should output a hostname. Users can then run ``ssh ``. 
After "ssh-ing" to the compute node, they can run the container from that node. To run larger experiments, it may be necessary to allocate multiple compute nodes. diff --git a/doc/UsersGuide/source/BuildingRunningTesting/DefaultVarsTable.rst b/doc/UsersGuide/BuildingRunningTesting/DefaultVarsTable.rst similarity index 100% rename from doc/UsersGuide/source/BuildingRunningTesting/DefaultVarsTable.rst rename to doc/UsersGuide/BuildingRunningTesting/DefaultVarsTable.rst diff --git a/doc/UsersGuide/source/BuildingRunningTesting/Quickstart.rst b/doc/UsersGuide/BuildingRunningTesting/Quickstart.rst similarity index 100% rename from doc/UsersGuide/source/BuildingRunningTesting/Quickstart.rst rename to doc/UsersGuide/BuildingRunningTesting/Quickstart.rst diff --git a/doc/UsersGuide/source/BuildingRunningTesting/RunSRW.rst b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst similarity index 96% rename from doc/UsersGuide/source/BuildingRunningTesting/RunSRW.rst rename to doc/UsersGuide/BuildingRunningTesting/RunSRW.rst index 9546471310..b9471acd69 100644 --- a/doc/UsersGuide/source/BuildingRunningTesting/RunSRW.rst +++ b/doc/UsersGuide/BuildingRunningTesting/RunSRW.rst @@ -549,6 +549,7 @@ The ``data:`` section of the machine file can point to various data sources that netcdf: /Users/username/DATA/UFS/FV3GFS/netcdf RAP: /Users/username/DATA/UFS/RAP/grib2 HRRR: /Users/username/DATA/UFS/HRRR/grib2 + RRFS: /Users/username/DATA/UFS/RRFS/grib2 This can be helpful when conducting multiple experiments with different types of data. @@ -584,7 +585,7 @@ the same cycle starting date/time and forecast hours. Other parameters may diffe Cartopy Shapefiles ````````````````````` -The Python plotting tasks require a path to the directory where the Cartopy Natural Earth shapefiles are located. The medium scale (1:50m) cultural and physical shapefiles are used to create coastlines and other geopolitical borders on the map. On :srw-wiki:`Level 1 ` systems, this path is already set in the system's machine file using the variable ``FIXshp``. Users on other systems will need to download the shapefiles and update the path of ``$FIXshp`` in the machine file they are using (e.g., ``$SRW/ush/machine/macos.yaml`` for a generic MacOS system, where ``$SRW`` is the path to the ``ufs-srweather-app`` directory). The subset of shapefiles required for the plotting task can be obtained from the `SRW Data Bucket `__. The full set of medium-scale (1:50m) Cartopy shapefiles can be downloaded `here `__. +The Python plotting tasks require a path to the directory where the Cartopy Natural Earth shapefiles are located. The medium scale (1:50m) cultural and physical shapefiles are used to create coastlines and other geopolitical borders on the map. On :srw-wiki:`Level 1 ` systems, this path is already set in the system's machine file using the variable ``FIXshp``. Users on other systems will need to download the shapefiles and update the path of ``$FIXshp`` in the machine file they are using (e.g., ``$SRW/ush/machine/macos.yaml`` for a generic MacOS system, where ``$SRW`` is the path to the ``ufs-srweather-app`` directory). The subset of shapefiles required for the plotting task can be obtained from the `SRW Data Bucket `__. The full set of medium-scale (1:50m) Cartopy shapefiles can be downloaded `here `__. 
Task Configuration ````````````````````` @@ -791,6 +792,8 @@ The ``FV3LAM_wflow.xml`` file runs the specific j-job scripts (``jobs/JREGIONAL_ - Run the forecast model (UFS Weather Model) * - run_post_* - Run the post-processing tool (UPP) + * - integration_test_* + - Run the integration test In addition to the baseline tasks described in :numref:`Table %s ` above, users may choose to run a variety of optional tasks, including plotting and verification tasks. @@ -983,6 +986,7 @@ The workflow run is complete when all tasks have "SUCCEEDED". If everything goes 201906151800 run_post_mem000_f001 4953245 SUCCEEDED 0 1 4.0 ... 201906151800 run_post_mem000_f012 4953381 SUCCEEDED 0 1 7.0 + 201906151800 integration_test_mem000 4953237 SUCCEEDED 0 1 7.0 If users choose to run METplus verification tasks as part of their experiment, the output above will include additional lines after ``run_post_mem000_f012``. The output will resemble the following but may be significantly longer when using ensemble verification: @@ -1058,6 +1062,7 @@ This will output the last 40 lines of the log file, which lists the status of th 201906151800 run_post_mem000_f004 - - - - - 201906151800 run_post_mem000_f005 - - - - - 201906151800 run_post_mem000_f006 - - - - - + 201906151800 integration_test_mem000 - - - - - Summary of workflow status: ~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1168,6 +1173,7 @@ The SRW App workflow can be run using standalone shell scripts in cases where th ./run_make_lbcs.sh ./run_fcst.sh ./run_post.sh + ./run_integration_test.sh Each task should finish with error code 0. For example: @@ -1184,31 +1190,33 @@ Check the batch script output file in your experiment directory for a “SUCCESS processors and wall clock time is a good starting point for NOAA HPC systems when running a 48-h forecast on the 25-km CONUS domain. For a brief description of tasks, see :numref:`Table %s `.
- +------------+------------------------+----------------+----------------------------+ - | **Stage/** | **Task Run Script** | **Number of** | **Wall Clock Time (H:mm)** | - | | | **Processors** | | - +============+========================+================+============================+ - | 1 | run_get_ics.sh | 1 | 0:20 (depends on HPSS vs | - | | | | FTP vs staged-on-disk) | - +------------+------------------------+----------------+----------------------------+ - | 1 | run_get_lbcs.sh | 1 | 0:20 (depends on HPSS vs | - | | | | FTP vs staged-on-disk) | - +------------+------------------------+----------------+----------------------------+ - | 1 | run_make_grid.sh | 24 | 0:20 | - +------------+------------------------+----------------+----------------------------+ - | 2 | run_make_orog.sh | 24 | 0:20 | - +------------+------------------------+----------------+----------------------------+ - | 3 | run_make_sfc_climo.sh | 48 | 0:20 | - +------------+------------------------+----------------+----------------------------+ - | 4 | run_make_ics.sh | 48 | 0:30 | - +------------+------------------------+----------------+----------------------------+ - | 4 | run_make_lbcs.sh | 48 | 0:30 | - +------------+------------------------+----------------+----------------------------+ - | 5 | run_fcst.sh | 48 | 0:30 | - +------------+------------------------+----------------+----------------------------+ - | 6 | run_post.sh | 48 | 0:25 (2 min per output | - | | | | forecast hour) | - +------------+------------------------+----------------+----------------------------+ + +------------+--------------------------+----------------+----------------------------+ + | **Stage/** | **Task Run Script** | **Number of** | **Wall Clock Time (H:mm)** | + | | | **Processors** | | + +============+==========================+================+============================+ + | 1 | run_get_ics.sh | 1 | 0:20 (depends on HPSS vs | + | | | | FTP vs staged-on-disk) | + +------------+--------------------------+----------------+----------------------------+ + | 1 | run_get_lbcs.sh | 1 | 0:20 (depends on HPSS vs | + | | | | FTP vs staged-on-disk) | + +------------+--------------------------+----------------+----------------------------+ + | 1 | run_make_grid.sh | 24 | 0:20 | + +------------+--------------------------+----------------+----------------------------+ + | 2 | run_make_orog.sh | 24 | 0:20 | + +------------+--------------------------+----------------+----------------------------+ + | 3 | run_make_sfc_climo.sh | 48 | 0:20 | + +------------+--------------------------+----------------+----------------------------+ + | 4 | run_make_ics.sh | 48 | 0:30 | + +------------+--------------------------+----------------+----------------------------+ + | 4 | run_make_lbcs.sh | 48 | 0:30 | + +------------+--------------------------+----------------+----------------------------+ + | 5 | run_fcst.sh | 48 | 0:30 | + +------------+--------------------------+----------------+----------------------------+ + | 6 | run_post.sh | 48 | 0:25 (2 min per output | + | | | | forecast hour) | + +------------+--------------------------+----------------+----------------------------+ + | 7 | run_integration_test.sh | 1 | 0:05 | + +------------+--------------------------+----------------+----------------------------+ Users can access log files for specific tasks in the ``$EXPTDIR/log`` directory. 
To see how the experiment is progressing, users can also check the end of the ``log.launch_FV3LAM_wflow`` file from the command line: diff --git a/doc/UsersGuide/source/BuildingRunningTesting/Tutorial.rst b/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst similarity index 99% rename from doc/UsersGuide/source/BuildingRunningTesting/Tutorial.rst rename to doc/UsersGuide/BuildingRunningTesting/Tutorial.rst index 445dee1b8f..a21b7aa9bd 100644 --- a/doc/UsersGuide/source/BuildingRunningTesting/Tutorial.rst +++ b/doc/UsersGuide/BuildingRunningTesting/Tutorial.rst @@ -45,12 +45,12 @@ On :srw-wiki:`Level 1 ` systems, users can fi * FV3GFS data for the first forecast (``control``) is located at: - * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#input_model_data/FV3GFS/grib2/2019061518/ + * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#develop-20240618/input_model_data/FV3GFS/grib2/2019061518/ * HRRR and RAP data for the second forecast (``test_expt``) is located at: - * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#input_model_data/HRRR/2019061518/ - * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#input_model_data/RAP/2019061518/ + * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#develop-20240618/input_model_data/HRRR/2019061518/ + * https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#develop-20240618/input_model_data/RAP/2019061518/ Load the Workflow -------------------- diff --git a/doc/UsersGuide/source/BuildingRunningTesting/VXCases.rst b/doc/UsersGuide/BuildingRunningTesting/VXCases.rst similarity index 96% rename from doc/UsersGuide/source/BuildingRunningTesting/VXCases.rst rename to doc/UsersGuide/BuildingRunningTesting/VXCases.rst index 080e180b14..2bf6f775d0 100644 --- a/doc/UsersGuide/source/BuildingRunningTesting/VXCases.rst +++ b/doc/UsersGuide/BuildingRunningTesting/VXCases.rst @@ -45,21 +45,21 @@ On :srw-wiki:`Level 1 ` systems, users can fi On other systems, users need to download the ``Indy-Severe-Weather.tgz`` file using any of the following methods: - #. Download directly from the S3 bucket using a browser. The data is available at https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#sample_cases/release-public-v2.2.0/. + #. Download directly from the S3 bucket using a browser. The data is available at https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#experiment-user-cases/release-public-v2.2.0/METplus-vx-sample/. #. Download from a terminal using the AWS command line interface (CLI), if installed: .. code-block:: console - aws s3 cp https://noaa-ufs-srw-pds.s3.amazonaws.com/index.html#sample_cases/release-public-v2.2.0/Indy-Severe-Weather.tgz Indy-Severe-Weather.tgz + aws s3 cp s3://noaa-ufs-srw-pds/experiment-user-cases/release-public-v2.2.0/METplus-vx-sample/Indy-Severe-Weather.tgz Indy-Severe-Weather.tgz #. Download from a terminal using ``wget``: .. code-block:: console - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/sample_cases/release-public-v2.2.0/Indy-Severe-Weather.tgz + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/METplus-vx-sample/Indy-Severe-Weather.tgz -This tar file contains :term:`IC/LBC ` files, observation data, model/forecast output, and MET verification output for the sample forecast. Users who have never run the SRW App on their system before will also need to download (1) the fix files required for SRW App forecasts and (2) the NaturalEarth shapefiles required for plotting.
Users can download the fix file data from a browser at https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/fix_data.tgz or visit :numref:`Section %s ` for instructions on how to download the data with ``wget``. NaturalEarth files are available at https://noaa-ufs-srw-pds.s3.amazonaws.com/NaturalEarth/NaturalEarth.tgz. See the :numref:`Section %s ` for more information on plotting. +This tar file contains :term:`IC/LBC ` files, observation data, model/forecast output, and MET verification output for the sample forecast. Users who have never run the SRW App on their system before will also need to download (1) the fix files required for SRW App forecasts and (2) the NaturalEarth shapefiles required for plotting. Users can download the fix file data from a browser at https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/fix_data.tgz or visit :numref:`Section %s ` for instructions on how to download the data with ``wget``. NaturalEarth files are available at https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/NaturalEarth/NaturalEarth.tgz. See the :numref:`Section %s ` for more information on plotting. After downloading ``Indy-Severe-Weather.tgz`` using one of the three methods above, untar the downloaded compressed archive file: diff --git a/doc/UsersGuide/source/BuildingRunningTesting/WE2Etests.rst b/doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst similarity index 99% rename from doc/UsersGuide/source/BuildingRunningTesting/WE2Etests.rst rename to doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst index 031038d1d7..b3a7bf847b 100644 --- a/doc/UsersGuide/source/BuildingRunningTesting/WE2Etests.rst +++ b/doc/UsersGuide/BuildingRunningTesting/WE2Etests.rst @@ -32,7 +32,7 @@ WE2E Test Categories WE2E tests are grouped into two categories that are of interest to code developers: ``fundamental`` and ``comprehensive`` tests. "Fundamental" tests are a lightweight but wide-reaching set of tests designed to function as a cheap "`smoke test `__" for changes to the UFS SRW App. The fundamental suite of tests runs common combinations of workflow tasks, physical domains, input data, physics suites, etc. The comprehensive suite of tests covers a broader range of combinations of capabilities, configurations, and components, ideally including all capabilities that *can* be run on a given platform. Because some capabilities are not available on all platforms (e.g., retrieving data directly from NOAA HPSS), the suite of comprehensive tests varies from machine to machine. -The list of fundamental and comprehensive tests can be viewed in the ``ufs-srweather-app/tests/WE2E/machine_suites/`` directory, and the tests are described in more detail in :doc:`this table <../tables/Tests>`. +The list of fundamental and comprehensive tests can be viewed in the ``ufs-srweather-app/tests/WE2E/machine_suites/`` directory, and the tests are described in more detail in :doc:`this table <../../tables/Tests>`. .. note:: @@ -78,6 +78,7 @@ For convenience, the WE2E tests are currently grouped into the following categor FV3GFS: RAP: HRRR: + RRFS: Some tests are duplicated among the above categories via symbolic links, both for legacy reasons (when tests for different capabilities were consolidated) and for convenience when a user would like to run all tests for a specific category (e.g., verification tests). @@ -169,7 +170,7 @@ The script to run the WE2E tests is named ``run_WE2E_tests.py`` and is located i .. 
note:: - The full list of WE2E tests is extensive, and some larger, high-resolution tests are computationally expensive. Estimates of walltime and core-hour cost for each test are provided in :doc:`this table <../tables/Tests>`. + The full list of WE2E tests is extensive, and some larger, high-resolution tests are computationally expensive. Estimates of walltime and core-hour cost for each test are provided in :doc:`this table <../../tables/Tests>`. Using the Test Script ---------------------- diff --git a/doc/UsersGuide/source/BuildingRunningTesting/index.rst b/doc/UsersGuide/BuildingRunningTesting/index.rst similarity index 100% rename from doc/UsersGuide/source/BuildingRunningTesting/index.rst rename to doc/UsersGuide/BuildingRunningTesting/index.rst diff --git a/doc/UsersGuide/source/CustomizingTheWorkflow/ConfigWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst similarity index 96% rename from doc/UsersGuide/source/CustomizingTheWorkflow/ConfigWorkflow.rst rename to doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst index 7a17dd1719..1db91121a1 100644 --- a/doc/UsersGuide/source/CustomizingTheWorkflow/ConfigWorkflow.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst @@ -20,7 +20,7 @@ USER Configuration Parameters If non-default parameters are selected for the variables in this section, they should be added to the ``user:`` section of the ``config.yaml`` file. ``RUN_ENVIR``: (Default: "nco") - This variable determines the workflow mode. The user can choose between two options: "nco" and "community". The "nco" mode uses a directory structure that mimics what is used in operations at NOAA/NCEP Central Operations (NCO) and at the NOAA/NCEP/Environmental Modeling Center (EMC), which works with NCO on pre-implementation testing. Specifics of the conventions used in "nco" mode can be found in the following `WCOSS Implementation Standards `__ document: + This variable determines the workflow mode. The user can choose between two options: "nco" and "community". The "nco" mode uses a directory structure that mimics what is used in operations at NOAA/NCEP Central Operations (NCO) and at the NOAA/NCEP/Environmental Modeling Center (EMC), which works with NCO on pre-implementation testing. Specifics of the conventions used in "nco" mode can be found in the following :nco:`WCOSS Implementation Standards <>` document: | NCEP Central Operations | WCOSS Implementation Standards @@ -30,7 +30,7 @@ If non-default parameters are selected for the variables in this section, they s Setting ``RUN_ENVIR`` to "community" is recommended in most cases for users who are not running in NCO's production environment. Valid values: ``"nco"`` | ``"community"`` ``MACHINE``: (Default: "BIG_COMPUTER") - The machine (a.k.a. platform or system) on which the workflow will run. Currently supported platforms are listed on the :srw-wiki:`SRW App Wiki page `. When running the SRW App on any ParallelWorks/NOAA Cloud system, use "NOAACLOUD" regardless of the underlying system (AWS, GCP, or Azure). Valid values: ``"HERA"`` | ``"ORION"`` | ``"HERCULES"`` | ``"JET"`` | ``"CHEYENNE"`` | ``"DERECHO"`` | ``"GAEA"`` | ``"GAEA-C5"`` | ``"NOAACLOUD"`` | ``"STAMPEDE"`` | ``"ODIN"`` | ``"MACOS"`` | ``"LINUX"`` | ``"SINGULARITY"`` | ``"WCOSS2"`` (Check ``ufs-srweather-app/ush/valid_param_vals.yaml`` for the most up-to-date list of supported platforms.) + The machine (a.k.a. platform or system) on which the workflow will run. 
Currently supported platforms are listed on the :srw-wiki:`SRW App Wiki page `. When running the SRW App on any ParallelWorks/NOAA Cloud system, use "NOAACLOUD" regardless of the underlying system (AWS, GCP, or Azure). Valid values: ``"HERA"`` | ``"ORION"`` | ``"HERCULES"`` | ``"JET"`` | ``"CHEYENNE"`` | ``"DERECHO"`` | ``"GAEA"`` | ``"NOAACLOUD"`` | ``"STAMPEDE"`` | ``"ODIN"`` | ``"MACOS"`` | ``"LINUX"`` | ``"SINGULARITY"`` | ``"WCOSS2"`` (Check ``ufs-srweather-app/ush/valid_param_vals.yaml`` for the most up-to-date list of supported platforms.) .. hint:: Users who are NOT on a named, supported Level 1 or 2 platform will need to set the ``MACHINE`` variable to ``LINUX`` or ``MACOS``. To combine use of a Linux or MacOS platform with the Rocoto workflow manager, users will also need to set ``WORKFLOW_MANAGER: "rocoto"`` in the ``platform:`` section of ``config.yaml``. This combination will assume a Slurm batch manager when generating the XML. @@ -156,8 +156,8 @@ These settings define platform-specific run commands. Users should set run comma ``RUN_CMD_SERIAL``: (Default: "") The run command for some serial jobs. -``RUN_CMD_AQM``: (Default: "") - The run command for some AQM tasks. ``RUN_CMD_NEXUS``: (Default: "") The run command for the AQM NEXUS tasks. @@ -275,10 +275,17 @@ These parameters are associated with the fixed (i.e., static) files. On :srw-wik System directory containing the graphics shapefiles. On Level 1 systems, these are set within the machine files. Users on other systems will need to provide the path to the directory that contains the *Natural Earth* shapefiles. ``FIXaqm``: (Default: "") - System directory where AQM data files are located. + Path to system directory containing AQM fixed files. ``FIXemis``: (Default: "") - System directory where AQM emission data files are located. + Path to system directory containing AQM emission data files. ``FIXcrtm``: (Default: "") Path to system directory containing CRTM fixed files. @@ -296,7 +303,11 @@ WORKFLOW Configuration Parameters If non-default parameters are selected for the variables in this section, they should be added to the ``workflow:`` section of the ``config.yaml`` file. -``WORKFLOW_ID``: (Default: "") +``WORKFLOW_ID``: (Default: ``''``) Unique ID for the workflow run that will be set in ``setup.py``. ``RELATIVE_LINK_FLAG``: (Default: "--relative") @@ -395,8 +406,8 @@ Set File Name Parameters ``MODEL_CONFIG_FN``: (Default: "model_configure") Name of a file that contains settings and configurations for the :term:`NUOPC`/:term:`ESMF` main component. In general, users should not set this variable in their configuration file (see :ref:`note `). -``NEMS_CONFIG_FN``: (Default: "nems.configure") - Name of a file that contains information about the various :term:`NEMS` components and their run sequence. In general, users should not set this variable in their configuration file (see :ref:`note `).
``UFS_CONFIG_FN``: (Default: "ufs.configure") Name of a template file that contains information about the various UFS components and their run sequence (ufs-weather-model: ufs.configure). Its default value is the name of the file that the UFS Weather Model expects to read in. @@ -431,8 +442,8 @@ Set File Path Parameters ``MODEL_CONFIG_TMPL_FP``: (Default: ``'{{ [user.PARMdir, MODEL_CONFIG_FN]|path_join }}'``) Path to the ``MODEL_CONFIG_FN`` file. -``NEMS_CONFIG_TMPL_FP``: (Default: ``'{{ [user.PARMdir, NEMS_CONFIG_FN]|path_join }}'``) - Path to the ``NEMS_CONFIG_FN`` file. ``UFS_CONFIG_TMPL_FP``: (Default: ``'{{ [user.PARMdir, UFS_CONFIG_FN]|path_join }}'``) Path to the ``UFS_CONFIG_FN`` file. @@ -452,8 +463,8 @@ This section contains files and paths to files that are staged in the experiment ``FIELD_TABLE_FP``: (Default: ``'{{ [EXPTDIR, FIELD_TABLE_FN]|path_join }}'``) Path to the field table in the experiment directory. (The field table specifies tracers that the forecast model reads in.) -``NEMS_CONFIG_FP``: (Default: ``'{{ [EXPTDIR, NEMS_CONFIG_FN]|path_join }}'``) - Path to the ``NEMS_CONFIG_FN`` file in the experiment directory. ``UFS_CONFIG_FP``: (Default: ``'{{ [EXPTDIR, UFS_CONFIG_FN]|path_join }}'``) Path to the ``UFS_CONFIG_FN`` file in the experiment directory. @@ -471,7 +482,11 @@ This section contains files and paths to files that are staged in the experiment Name of the Rocoto workflow XML file that the experiment generation script creates. This file defines the workflow for the experiment. ``GLOBAL_VAR_DEFNS_FN``: (Default: "var_defns.yaml") - Name of the experiment configuration file. It contains the primary experiment variables defined in this default configuration script and in the user-specified configuration as well as secondary experiment variables generated by the experiment generation script. This file is the primary source of information used in the scripts at run time. The primary variables are defined in the default configuration file (``config_defaults.yaml``) and in the user configuration file (``config.yaml``). The secondary experiment variables are generated by the experiment generation script. + Name of the auto-generated experiment configuration file. It contains the primary experiment variables defined in this default configuration script and in the user-specified configuration as well as secondary experiment variables generated by the experiment generation script from machine files and other settings. This file is the primary source of information used in the scripts at run time. ``ROCOTO_YAML_FN``: (Default: "rocoto_defns.yaml") Name of the YAML file containing the YAML workflow definition from which the Rocoto XML file is created. @@ -550,7 +565,11 @@ CCPP Parameter ``CCPP_PHYS_SUITE_FP``: (Default: ``'{{ [workflow.EXPTDIR, CCPP_PHYS_SUITE_FN]|path_join }}'``) The full path to the suite definition file (SDF) in the experiment directory.
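In practice, users do not usually set these file and path variables directly; they select a suite by name in the ``workflow:`` section of ``config.yaml`` and let the ``CCPP_PHYS_SUITE_FN``/``CCPP_PHYS_SUITE_FP`` defaults above resolve from it. A minimal sketch, with the suite name chosen as one example of a valid value:

.. code-block:: console

    # The *_FN/*_FP defaults are derived from this suite name, so only the
    # name itself normally needs to be set.
    workflow:
      CCPP_PHYS_SUITE: FV3_RRFS_v1beta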
-``CCPP_PHYS_DIR``: (Default: ``'{{ [user.UFS_WTHR_MDL_DIR, "FV3", "ccpp", "physics", "physics", "SFC_models", "Land", "Noahmp"] |path_join }}'``) +``CCPP_PHYS_DIR``: (Default: ``'{{ [user.UFS_WTHR_MDL_DIR, "FV3", "ccpp", "physics", "physics", "SFC_Models", "Land", "Noahmp"] |path_join }}'``) The directory containing the CCPP physics source code. This is needed to link table(s) contained in that repository. Field Dictionary Parameters @@ -717,9 +736,9 @@ A standard set of environment variables has been established for *nco* mode to s Only *community* mode is fully supported for releases. *nco* mode is used by those at the Environmental Modeling Center (EMC) and Global Systems Laboratory (GSL) who are working on pre-implementation operational testing. Other users should run the SRW App in *community* mode. ``envir_default, NET_default, model_ver_default, RUN_default``: - Standard environment variables defined in the NCEP Central Operations WCOSS Implementation Standards document. These variables are used in forming the path to various directories containing input, output, and workflow files. The variables are defined in the `WCOSS Implementation Standards `__ document (pp. 4-5) as follows: + Standard environment variables defined in the NCEP Central Operations WCOSS Implementation Standards document. These variables are used in forming the path to various directories containing input, output, and workflow files. The variables are defined in the :nco:`WCOSS Implementation Standards ` document (pp. 4-5) as follows: - ``envir_default``: (Default: "para") + ``envir_default``: (Default: "test") Set to "test" during the initial testing phase, "para" when running in parallel (on a schedule), and "prod" in production. ``NET_default``: (Default: "srw") @@ -731,46 +750,28 @@ A standard set of environment variables has been established for *nco* mode to s ``RUN_default``: (Default: "srw") Name of model run (third level of ``com`` directory structure). In general, same as ``${NET_default}``. -``OPSROOT_default``: (Default: ``'{{ workflow.EXPT_BASEDIR }}/../nco_dirs'``) - The operations root directory in *nco* mode. - -``COMROOT_default``: (Default: ``'{{ OPSROOT_default }}/com'``) - The ``com`` root directory for input/output data that is located on the current system (typically ``$OPSROOT_default/com``). - -``DATAROOT_default``: (Default: ``'{{OPSROOT_default }}/tmp'``) - Directory containing the (temporary) working directory for running jobs; typically named ``$OPSROOT_default/tmp`` in production. - -``DCOMROOT_default``: (Default: ``'{{OPSROOT_default }}/dcom'``) - ``dcom`` root directory, typically ``$OPSROOT_default/dcom``. This directory contains input/incoming data that is retrieved from outside WCOSS. - -``LOGBASEDIR_default``: (Default: ``'{% if user.RUN_ENVIR == "nco" %}{{ [OPSROOT_default, "output"]|path_join }}{% else %}{{ [workflow.EXPTDIR, "log"]|path_join }}{% endif %}'``) - Directory in which the log files from the workflow tasks will be placed. - -``COMIN_BASEDIR``: (Default: ``'{{ COMROOT_default }}/{{ NET_default }}/{{ model_ver_default }}'``) - ``com`` directory for current model's input data, typically ``$COMROOT/$NET/$model_ver/$RUN.$PDY``.
- -``COMOUT_BASEDIR``: (Default: ``'{{ COMROOT_default }}/{{ NET_default }}/{{ model_ver_default }}'``) - ``com`` directory for current model's output data, typically ``$COMROOT/$NET/$model_ver/$RUN.$PDY``. +``PTMP``: (Default: ``'{{ workflow.EXPT_BASEDIR }}/../nco_dirs'``) + User-defined path to the ``com``-type directories (``OPSROOT=$PTMP/$envir``). ``DBNROOT_default``: (Default: "") Root directory for the data-alerting utilities. -``SENDECF_default``: (Default: false) +``SENDECF_default``: (Default: "NO") Boolean variable used to control ``ecflow_client`` child commands. -``SENDDBN_default``: (Default: false) +``SENDDBN_default``: (Default: "NO") Boolean variable used to control sending products off WCOSS2. -``SENDDBN_NTC_default``: (Default: false) +``SENDDBN_NTC_default``: (Default: "NO") Boolean variable used to control sending products with WMO headers off WCOSS2. -``SENDCOM_default``: (Default: false) +``SENDCOM_default``: (Default: "YES") Boolean variable to control data copies to ``$COMOUT``. -``SENDWEB_default``: (Default: false) +``SENDWEB_default``: (Default: "NO") Boolean variable used to control sending products to a web server, often ``ncorzdm``. -``KEEPDATA_default``: (Default: true) +``KEEPDATA_default``: (Default: "YES") Boolean variable used to specify whether or not the working directory should be kept upon successful job completion. ``MAILTO_default``: (Default: "") @@ -942,7 +943,7 @@ Basic Task Parameters For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. ``EXTRN_MDL_NAME_ICS``: (Default: "FV3GFS") - The name of the external model that will provide fields from which initial condition (IC) files, surface files, and 0-th hour boundary condition files will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"NAM"`` | ``"UFS-CASE-STUDY"`` + The name of the external model that will provide fields from which initial condition (IC) files, surface files, and 0-th hour boundary condition files will be generated for input into the forecast model. Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"RRFS"`` | ``"NAM"`` | ``"UFS-CASE-STUDY"`` ``EXTRN_MDL_ICS_OFFSET_HRS``: (Default: 0) Users may wish to start a forecast using forecast data from a previous cycle of an external model. This variable indicates how many hours earlier the external model started than the FV3 forecast configured here. For example, if the forecast should start from a 6-hour forecast of the GFS, then ``EXTRN_MDL_ICS_OFFSET_HRS: "6"``. @@ -996,7 +997,7 @@ Basic Task Parameters For each workflow task, certain parameter values must be passed to the job scheduler (e.g., Slurm), which submits a job for the task. ``EXTRN_MDL_NAME_LBCS``: (Default: "FV3GFS") - The name of the external model that will provide fields from which lateral boundary condition (LBC) files (except for the 0-th hour LBC file) will be generated for input into the forecast model.
Valid values: ``"GSMGFS"`` | ``"FV3GFS"`` | ``"GEFS"`` | ``"GDAS"`` | ``"RAP"`` | ``"HRRR"`` | ``"RRFS"`` | ``"NAM"`` | ``"UFS-CASE-STUDY"`` ``LBC_SPEC_INTVL_HRS``: (Default: 6) The interval (in integer hours) at which LBC files will be generated. This is also referred to as the *boundary update interval*. Note that the model selected in ``EXTRN_MDL_NAME_LBCS`` must have data available at a frequency greater than or equal to that implied by ``LBC_SPEC_INTVL_HRS``. For example, if ``LBC_SPEC_INTVL_HRS`` is set to "6", then the model must have data available at least every 6 hours. It is up to the user to ensure that this is the case. @@ -1126,8 +1127,13 @@ For each workflow task, certain parameter values must be passed to the job sched For more information, see the `Intel Development Reference Guide `__. +<<<<<<< HEAD:doc/UsersGuide/source/CustomizingTheWorkflow/ConfigWorkflow.rst ``OMP_NUM_THREADS_RUN_FCST``: (Default: 2) The number of OpenMP threads to use for parallel regions. Corresponds to the ``ATM_omp_num_threads`` value in ``nems.configure``. +======= +``OMP_NUM_THREADS_RUN_FCST``: (Default: 1) + The number of OpenMP threads to use for parallel regions. Corresponds to the ``atmos_nthreads`` value in ``model_configure``. +>>>>>>> origin/develop:doc/UsersGuide/CustomizingTheWorkflow/ConfigWorkflow.rst ``OMP_STACKSIZE_RUN_FCST``: (Default: "1024m") Controls the size of the stack for threads created by the OpenMP implementation. @@ -1367,7 +1373,7 @@ PLOT_ALLVARS Configuration Parameters Typically, the following parameters must be set explicitly by the user in the ``task_plot_allvars:`` section of the configuration file (``config.yaml``) when executing the plotting tasks. ``COMOUT_REF``: (Default: "") - Path to the reference experiment's COMOUT directory. This is the directory where the GRIB2 files from post-processing are located. In *community* mode (i.e., when ``RUN_ENVIR: "community"``), this directory will correspond to the location in the experiment directory where the post-processed output can be found (e.g., ``$EXPTDIR/$DATE_FIRST_CYCL/postprd``). In *nco* mode, this directory should be set to the location of the ``COMOUT`` directory and end with ``$PDY/$cyc``. For more detail on *nco* standards and directory naming conventions, see `WCOSS Implementation Standards `__ (particularly pp. 4-5). + Path to the reference experiment's COMOUT directory. This is the directory where the GRIB2 files from post-processing are located. In *community* mode (i.e., when ``RUN_ENVIR: "community"``), this directory will correspond to the location in the experiment directory where the post-processed output can be found (e.g., ``$EXPTDIR/$DATE_FIRST_CYCL/postprd``). In *nco* mode, this directory should be set to the location of the ``COMOUT`` directory and end with ``$PDY/$cyc``. For more detail on *nco* standards and directory naming conventions, see :nco:`WCOSS Implementation Standards ` (particularly pp. 4-5). ``PLOT_FCST_START``: (Default: 0) The starting forecast hour for the plotting task. For example, if a forecast starts at 18h/18z, this is considered the 0th forecast hour, so "starting forecast hour" should be 0, not 18. If a forecast starts at 18h/18z, but the user only wants plots from the 6th forecast hour on, "starting forecast hour" should be 6. 
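Taken together, a hypothetical ``task_plot_allvars:`` fragment of ``config.yaml`` for a community-mode run that begins plotting at forecast hour 6 might look like this:

.. code-block:: console

    task_plot_allvars:
      # Empty COMOUT_REF (the default) plots only the current experiment; point
      # it at a reference experiment's post-processed output to compare runs.
      COMOUT_REF: ""
      # Start plotting at the 6th forecast hour instead of hour 0.
      PLOT_FCST_START: 6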
@@ -1394,6 +1400,9 @@ Non-default parameters for the ``nexus_emission_*`` tasks are set in the ``task_ ``PPN_NEXUS_EMISSION``: (Default: ``'{{ platform.NCORES_PER_NODE // OMP_NUM_THREADS_NEXUS_EMISSION }}'``) Processes per node for the ``nexus_emission_*`` tasks. +``NNODES_NEXUS_EMISSION``: (Default: 4) + The number of nodes to request from the job scheduler for the NEXUS emission task. + ``KMP_AFFINITY_NEXUS_EMISSION``: (Default: "scatter") Intel Thread Affinity Interface for the ``nexus_emission_*`` tasks. See :ref:`this note ` for more information on thread affinity. @@ -1403,12 +1412,20 @@ Non-default parameters for the ``nexus_emission_*`` tasks are set in the ``task_ ``OMP_STACKSIZE_NEXUS_EMISSION``: (Default: "1024m") Controls the size of the stack for threads created by the OpenMP implementation. +POINT_SOURCE Configuration Parameters +------------------------------------------------ +Non-default parameters for the ``task_point_source`` tasks are set in the ``task_point_source:`` section of the ``config.yaml`` file. + +``PT_SRC_SUBDIR``: (Default: ``"NEI2016v1/v2023-01-PT"``) + Subdirectory structure of point source data under ``FIXemis``. + Full path: ``FIXemis/PT_SRC_SUBDIR`` + BIAS_CORRECTION_O3 Configuration Parameters ------------------------------------------------- Non-default parameters for the ``bias_correction_o3`` tasks are set in the ``task_bias_correction_o3:`` section of the ``config.yaml`` file. -``KMP_AFFINITY_BIAS_CORRECTION_O3``: "scatter" +``KMP_AFFINITY_BIAS_CORRECTION_O3``: (Default: "scatter") Intel Thread Affinity Interface for the ``bias_correction_o3`` task. See :ref:`this note ` for more information on thread affinity. ``OMP_NUM_THREADS_BIAS_CORRECTION_O3``: (Default: 32) @@ -1762,38 +1779,14 @@ Non-default parameters for coupled Air Quality Modeling (AQM) tasks are set in t ``DO_AQM_SAVE_FIRE``: (Default: false) Archive fire emission file to HPSS. -``DCOMINbio_default``: (Default: "") - Path to the directory containing AQM bio files. - -``DCOMINdust_default``: (Default: "/path/to/dust/dir") - Path to the directory containing AQM dust file. - -``DCOMINcanopy_default``: (Default: "/path/to/canopy/dir") - Path to the directory containing AQM canopy files. - -``DCOMINfire_default``: (Default: "") - Path to the directory containing AQM fire files. - -``DCOMINchem_lbcs_default``: (Default: "") - Path to the directory containing chemical LBC files. - -``DCOMINgefs_default``: (Default: "") - Path to the directory containing GEFS aerosol LBC files. - -``DCOMINpt_src_default``: (Default: "/path/to/point/source/base/directory") - Parent directory containing point source files. - -``DCOMINairnow_default``: (Default: "/path/to/airnow/obaservation/data") +``COMINairnow_default``: (Default: "/path/to/airnow/observation/data") Path to the directory containing AIRNOW observation data. -``COMINbicor``: (Default: "/path/to/historical/airnow/data/dir") - Path of reading in historical training data for bias correction. - -``COMOUTbicor``: (Default: "/path/to/historical/airnow/data/dir") - Path to save the current cycle's model output and AirNow observations as training data for future use. ``$COMINbicor`` and ``$COMOUTbicor`` can be distinguished by the ``${yyyy}${mm}${dd}`` under the same location. +``COMINfire_default``: (Default: "") + Path to the directory containing AQM fire files. -``AQM_CONFIG_DIR``: (Default: "") - Configuration directory for AQM. +``COMINgefs_default``: (Default: "") + Path to the directory containing GEFS aerosol LBC files.
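As an illustration, a user staging a different point-source emissions version under ``FIXemis`` could override the subdirectory in ``config.yaml``; the value below simply restates the documented default:

.. code-block:: console

    task_point_source:
      # Resolved as FIXemis/PT_SRC_SUBDIR at run time.
      PT_SRC_SUBDIR: NEI2016v1/v2023-01-PT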
``AQM_BIO_FILE``: (Default: "BEIS_SARC401.ncf") File name of AQM BIO file. @@ -1819,9 +1812,6 @@ Non-default parameters for coupled Air Quality Modeling (AQM) tasks are set in t ``AQM_FIRE_FILE_OFFSET_HRS``: (Default: 0) Time offset when retrieving fire emission data files. In a real-time run, the data files for :term:`ICs/LBCs` are not ready for use until the case starts. To resolve this issue, a real-time run uses the input data files in the previous cycle. For example, if the experiment run cycle starts at 12z, and ``AQM_FIRE_FILE_OFFSET_HRS: 6``, the fire emission data file from the previous cycle (06z) is used. -``AQM_FIRE_ARCHV_DIR``: (Default: "/path/to/archive/dir/for/RAVE/on/HPSS") - Path to the archive directory for RAVE emission files on :term:`HPSS`. - ``AQM_RC_FIRE_FREQUENCY``: (Default: "static") Fire frequency in ``aqm.rc``. @@ -1840,12 +1830,6 @@ Non-default parameters for coupled Air Quality Modeling (AQM) tasks are set in t ``AQM_GEFS_FILE_CYC``: (Default: "") Cycle of the GEFS aerosol LBC files only if it is fixed. -``NEXUS_INPUT_DIR``: (Default: "") - Same as ``GRID_DIR`` but for the the air quality emission generation task. Should be blank for the default value specified in ``setup.sh``. - -``NEXUS_FIX_DIR``: (Default: "") - Directory containing ``grid_spec`` files as the input file of NEXUS. - ``NEXUS_GRID_FN``: (Default: "grid_spec_GSD_HRRR_25km.nc") File name of the input ``grid_spec`` file of NEXUS. diff --git a/doc/UsersGuide/source/CustomizingTheWorkflow/DefineWorkflow.rst b/doc/UsersGuide/CustomizingTheWorkflow/DefineWorkflow.rst similarity index 100% rename from doc/UsersGuide/source/CustomizingTheWorkflow/DefineWorkflow.rst rename to doc/UsersGuide/CustomizingTheWorkflow/DefineWorkflow.rst diff --git a/doc/UsersGuide/source/CustomizingTheWorkflow/InputOutputFiles.rst b/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst similarity index 91% rename from doc/UsersGuide/source/CustomizingTheWorkflow/InputOutputFiles.rst rename to doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst index d8266e74e8..bf24055de4 100644 --- a/doc/UsersGuide/source/CustomizingTheWorkflow/InputOutputFiles.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/InputOutputFiles.rst @@ -20,8 +20,9 @@ The external model files needed for initializing an experiment can be obtained i ways, including: * Pulled from the `SRW App Data Bucket `__, - * Pulled from the NOAA High Performance Storage System (:term:`HPSS`) during the workflow execution (requires user access), or - * Obtained and staged by the user from a different source. + * Pulled from the NOAA High Performance Storage System (:term:`HPSS`) during the workflow execution (requires user access), + * Obtained and staged by the user from a different source, or + * Pulled from the `RRFS data bucket (rrfs_a data) `_. The data format for these files can be :term:`GRIB2` or :term:`NEMSIO`. More information on downloading and setting up the external model data can be found in :numref:`Section %s `. Once the data is set up, the end-to-end application will run the system and write output files to disk. @@ -168,8 +169,8 @@ If users wish to modify the fields or levels that are output from the UPP, they This process requires advanced knowledge of which fields can be output for the UFS Weather Model. 
UPP Product Output Tables for the UFS SRW LAM Grid: - * :doc:`3D Native Hybrid Level Fields <../tables/SRW_NATLEV_table>` - * :doc:`3D Pressure Level Fields <../tables/SRW_PRSLEV_table>` + * :doc:`3D Native Hybrid Level Fields <../../tables/SRW_NATLEV_table>` + * :doc:`3D Pressure Level Fields <../../tables/SRW_PRSLEV_table>` Use the instructions in the `UPP User's Guide `__ to make modifications to the ``fv3lam.xml`` file and to remake the flat text file, called ``postxconfig-NT-fv3lam.txt`` (default), that the UPP reads. @@ -225,14 +226,14 @@ A set of input files, including static (fix) data and raw initial and lateral bo Static Files -------------- -Static files are available in the `"fix" directory `__ of the SRW App Data Bucket. Users can download the full set of fix files as a tar file: +Static files are available in the `"fix" directory `__ of the SRW App Data Bucket. Users can download the full set of fix files as a tar file: .. code-block:: console - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/fix_data.tgz + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/fix_data.tgz tar -xzf fix_data.tgz -Alternatively, users can download the static files individually from the `"fix" directory `__ of the SRW Data Bucket using the ``wget`` command for each required file. Users will need to create an appropriate directory structure for the files when downloading them individually. The best solution is to download the files into directories that mirror the structure of the `Data Bucket `__. +Alternatively, users can download the static files individually from the `"fix" directory `__ of the SRW Data Bucket using the ``wget`` command for each required file. Users will need to create an appropriate directory structure for the files when downloading them individually. The best solution is to download the files into directories that mirror the structure of the `Data Bucket `__. The environment variables ``FIXgsm``, ``FIXorg``, and ``FIXsfc`` indicate the path to the directories where the static files are located. After downloading the experiment data, users must set the paths to the files in ``config.yaml``. Add the following code to the ``task_run_fcst:`` section of the ``config.yaml`` file, and alter the variable paths accordingly: @@ -246,13 +247,13 @@ The environment variables ``FIXgsm``, ``FIXorg``, and ``FIXsfc`` indicate the pa Initial Condition/Lateral Boundary Condition File Formats and Source ----------------------------------------------------------------------- -The SRW Application currently supports raw initial and lateral boundary conditions from numerous models (i.e., FV3GFS, GEFS, GDAS, NAM, RAP, HRRR). The data can be provided in three formats: :term:`NEMSIO`, :term:`netCDF`, or :term:`GRIB2`. +The SRW Application currently supports raw initial and lateral boundary conditions from numerous models (i.e., FV3GFS, GEFS, GDAS, NAM, RAP, HRRR, RRFS). The data can be provided in three formats: :term:`NEMSIO`, :term:`netCDF`, or :term:`GRIB2`. To download the model input data for the 12-hour "out-of-the-box" experiment configuration in ``config.community.yaml`` file, run: .. 
code-block:: console - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/current_srw_release_data/gst_data.tgz + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/experiment-user-cases/release-public-v2.2.0/out-of-the-box/gst_data.tgz tar -xzf gst_data.tgz To download data for different dates, model types, and formats, users can explore the ``input_model_data`` section of the data bucket and replace the links above with ones that fetch their desired data. @@ -273,7 +274,7 @@ The paths to ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_SOURCE_BASEDIR_LBC USE_USER_STAGED_EXTRN_FILES: true EXTRN_MDL_SOURCE_BASEDIR_LBCS: /path/to/ufs-srweather-app/input_model_data/FV3GFS/grib2/YYYYMMDDHH -The two ``EXTRN_MDL_SOURCE_BASEDIR_*CS`` variables describe where the :term:`IC ` and :term:`LBC ` file directories are located, respectively. For ease of reusing ``config.yaml`` across experiments, it is recommended that users set up the raw :term:`IC/LBC ` file paths to include the model name (e.g., FV3GFS, GEFS, GDAS, NAM, RAP, HRRR), data format (e.g., grib2, nemsio), and date (in ``YYYYMMDDHH`` format). For example: ``/path/to/input_model_data/FV3GFS/grib2/2019061518/``. While there is flexibility to modify these settings, this structure will provide the most reusability for multiple dates when using the SRW Application workflow. +The two ``EXTRN_MDL_SOURCE_BASEDIR_*CS`` variables describe where the :term:`IC ` and :term:`LBC ` file directories are located, respectively. For ease of reusing ``config.yaml`` across experiments, it is recommended that users set up the raw :term:`IC/LBC ` file paths to include the model name (e.g., FV3GFS, GEFS, GDAS, NAM, RAP, HRRR, RRFS), data format (e.g., grib2, nemsio, netcdf), and date (in ``YYYYMMDDHH`` format). For example: ``/path/to/input_model_data/FV3GFS/grib2/2019061518/``. While there is flexibility to modify these settings, this structure will provide the most reusability for multiple dates when using the SRW Application workflow. When files are pulled from NOAA :term:`HPSS` (rather than downloaded from the data bucket), the naming convention looks something like this: @@ -290,11 +291,12 @@ When files are pulled from NOAA :term:`HPSS` (rather than downloaded from the da * RAP (GRIB2): ``rap.t{cycle}z.wrfprsf{fhr}.grib2`` * HRRR (GRIB2): ``hrrr.t{cycle}z.wrfprsf{fhr}.grib2`` +* RRFS (GRIB2): ``rrfs.t{cycle}z.prslev.f{fhr}.conus.grib2`` where: * ``{cycle}`` corresponds to the 2-digit hour of the day when the forecast cycle starts, and - * ``{fhr}`` corresponds to the 2- or 3-digit nth hour of the forecast (3-digits for FV3GFS/GDAS data and 2 digits for RAP/HRRR data). + * ``{fhr}`` corresponds to the 2- or 3-digit nth hour of the forecast (3 digits for FV3GFS/GDAS/RRFS data and 2 digits for RAP/HRRR data). For example, a forecast using FV3GFS GRIB2 data that starts at 18h00 UTC would have a ``{cycle}`` value of 18, which is the 000th forecast hour. The LBCS file for 21h00 UTC would be named ``gfs.t18z.pgrb2.0p25.f003``. @@ -318,7 +320,7 @@ Default Initial and Lateral Boundary Conditions ----------------------------------------------- The default initial and lateral boundary condition files are set to be a severe weather case from June 15, 2019 (20190615) at 18 UTC. FV3GFS GRIB2 files are the default model and file format. A tar file -(``gst_data.tgz``) containing the model data for this case is available in the `UFS SRW App Data Bucket `__. +(``gst_data.tgz``) containing the model data for this case is available in the `UFS SRW App Data Bucket `__.
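To make the recommended ``<model>/<format>/<YYYYMMDDHH>`` layout concrete, the default 20190615 18 UTC case could be staged by hand as in the sketch below. This is illustrative only: ``/path/to`` is a placeholder, and the ``input_model_data`` prefix shown for the ``noaa-ufs-srw-pds`` bucket is an assumption that should be confirmed against the Data Bucket index before use.

.. code-block:: console

   # Sketch: stage FV3GFS GRIB2 files for the default 2019061518 cycle under
   # the recommended <model>/<format>/<YYYYMMDDHH> layout (paths illustrative;
   # the input_model_data bucket prefix is an assumption to verify first).
   mkdir -p /path/to/input_model_data/FV3GFS/grib2/2019061518
   cd /path/to/input_model_data/FV3GFS/grib2/2019061518
   # f000 supplies the ICs; f003 supplies the 21 UTC LBCs, following the
   # gfs.t{cycle}z.pgrb2.0p25.f{fhr} naming convention described above.
   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/input_model_data/FV3GFS/grib2/2019061518/gfs.t18z.pgrb2.0p25.f000
   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/input_model_data/FV3GFS/grib2/2019061518/gfs.t18z.pgrb2.0p25.f003

The ``EXTRN_MDL_SOURCE_BASEDIR_ICS`` and ``EXTRN_MDL_SOURCE_BASEDIR_LBCS`` variables would then point at this directory, as in the ``config.yaml`` snippet earlier in this section.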
Running the App for Different Dates ----------------------------------- @@ -353,6 +355,8 @@ AWS S3 Data Buckets: * GDAS: https://registry.opendata.aws/noaa-gfs-bdp-pds/ * HRRR: https://registry.opendata.aws/noaa-hrrr-pds/ (necessary fields for initializing available for dates 2015 and newer) * A list of the NOAA Open Data Dissemination (NODD) datasets can be found here: https://www.noaa.gov/nodd/datasets +* RRFS: Experimental data is available starting 02/01/2024 for deterministic forecasts initialized hourly. Forecast data are available out to 60 hours for 00, 06, 12, and 18 UTC starting times (cycles), and out to 18 hours for other cycles. Earlier dates, from 05/01/2023 to 01/31/2024, may contain only forecasts at 00, 06, 12, and 18 UTC; users need to verify that data exist for the needed dates. + https://noaa-rrfs-pds.s3.amazonaws.com/index.html#rrfs_a/ NCEI Archive: @@ -364,11 +368,6 @@ Google Cloud: * HRRR: https://console.cloud.google.com/marketplace/product/noaa-public/hrrr -FTP Data Repository (data for SRW Release v1.0.0 & v1.0.1): - -* https://ftp.emc.ncep.noaa.gov/EIB/UFS/SRW/v1p0/fix/ -* https://ftp.emc.ncep.noaa.gov/EIB/UFS/SRW/v1p0/simple_test_case/ - Others: * University of Utah HRRR archive: https://home.chpc.utah.edu/~u0553130/Brian_Blaylock/cgi-bin/hrrr_download.cgi diff --git a/doc/UsersGuide/source/CustomizingTheWorkflow/LAMGrids.rst b/doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst similarity index 99% rename from doc/UsersGuide/source/CustomizingTheWorkflow/LAMGrids.rst rename to doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst index 1fd163e8c6..482caf8590 100644 --- a/doc/UsersGuide/source/CustomizingTheWorkflow/LAMGrids.rst +++ b/doc/UsersGuide/CustomizingTheWorkflow/LAMGrids.rst @@ -75,7 +75,7 @@ The 3-km CONUS domain is ideal for running the ``FV3_RRFS_v1beta`` physics suite The boundary of the ``RRFS_CONUS_3km`` domain is shown in :numref:`Figure %s ` (in red), and the boundary of the :ref:`write component grid ` sits just inside the computational domain (in blue). This extra grid is required because the post-processing utility (:term:`UPP`) is unable to process data on the native FV3 gnomonic grid (in red). Therefore, model data are interpolated to a Lambert conformal grid (the write component grid) in order for the :term:`UPP` to read in and correctly process the data. .. note:: - While it is possible to initialize the FV3-LAM with coarser external model data when using the ``RRFS_CONUS_3km`` domain, it is generally advised to use external model data (such as HRRR or RAP data) that has a resolution similar to that of the native FV3-LAM (predefined) grid. + While it is possible to initialize the FV3-LAM with coarser external model data when using the ``RRFS_CONUS_3km`` domain, it is generally advised to use external model data (such as HRRR, RRFS, or RAP data) that has a resolution similar to that of the native FV3-LAM (predefined) grid.
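Because RRFS data availability varies by date and cycle, it can save time to list the contents of the ``noaa-rrfs-pds`` bucket before configuring an experiment around RRFS initial or boundary data. A minimal sketch, assuming the AWS CLI is installed; the bucket is public, so ``--no-sign-request`` is sufficient, and the prefix layout beneath ``rrfs_a/`` should be confirmed against the bucket index linked above:

.. code-block:: console

   # List the prefixes under rrfs_a/ in the public RRFS bucket, then drill
   # into a returned prefix the same way to see which cycles and forecast
   # hours actually exist before pointing an experiment at them.
   # (The layout beneath rrfs_a/ is an assumption; check the bucket index.)
   aws s3 ls --no-sign-request s3://noaa-rrfs-pds/rrfs_a/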
Predefined SUBCONUS Grid Over Indianapolis diff --git a/doc/UsersGuide/source/CustomizingTheWorkflow/TemplateVars.rst b/doc/UsersGuide/CustomizingTheWorkflow/TemplateVars.rst similarity index 100% rename from doc/UsersGuide/source/CustomizingTheWorkflow/TemplateVars.rst rename to doc/UsersGuide/CustomizingTheWorkflow/TemplateVars.rst diff --git a/doc/UsersGuide/source/CustomizingTheWorkflow/index.rst b/doc/UsersGuide/CustomizingTheWorkflow/index.rst similarity index 100% rename from doc/UsersGuide/source/CustomizingTheWorkflow/index.rst rename to doc/UsersGuide/CustomizingTheWorkflow/index.rst diff --git a/doc/UsersGuide/source/Reference/FAQ.rst b/doc/UsersGuide/Reference/FAQ.rst similarity index 96% rename from doc/UsersGuide/source/Reference/FAQ.rst rename to doc/UsersGuide/Reference/FAQ.rst index 21bef328a3..e8c3df0dec 100644 --- a/doc/UsersGuide/source/Reference/FAQ.rst +++ b/doc/UsersGuide/Reference/FAQ.rst @@ -20,34 +20,48 @@ Building the SRW App How can I clean up the SRW App code if something went wrong during the build? =============================================================================== -The ``ufs-srweather-app`` repository contains a ``devclean.sh`` convenience script. This script can be used to clean up code if something goes wrong when checking out externals or building the application. To view usage instructions and to get help, run with the ``-h`` flag: +The ``ufs-srweather-app`` repository contains a ``devclean.sh`` convenience script. This script can be used to clean up code if something goes wrong when checking out externals or building the application. To view usage instructions and to get help, run with the ``-h`` or ``--help`` flag: .. code-block:: console ./devclean.sh -h -To remove the ``build`` directory, run: +To remove all the build artifacts and directories except the conda installation, use the ``-b`` or ``--build`` flag: .. code-block:: console - ./devclean.sh --remove + ./devclean.sh --build -To remove all build artifacts (including ``build``, ``exec``, ``lib``, and ``share``), run: +When running the SRW App in a container, add the ``--container`` option so that the ``container-bin`` directory is removed instead of ``exec``, e.g.: .. code-block:: console - ./devclean.sh --clean + ./devclean.sh -b --container + +To remove only the ``conda`` directory and the ``conda_loc`` file in the main SRW directory, run with the ``-c`` or ``--conda`` flag: + +.. code-block:: console + + ./devclean.sh --conda OR - ./devclean.sh -a + ./devclean.sh -c -To remove external submodules, run: +To remove external submodules, run with the ``-s`` or ``--sub-modules`` flag: .. code-block:: console ./devclean.sh --sub-modules +To remove all build artifacts, conda, and submodules (equivalent to ``-b -c -s``), run with the ``-a`` or ``--all`` flag: + +.. code-block:: console + + ./devclean.sh --all + + Users will need to check out the external submodules again before building the application. + In addition to the options above, many standard terminal commands can be run to remove unwanted files and directories (e.g., ``rm -rf expt_dirs``). A complete explanation of these options is beyond the scope of this User's Guide.
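Putting the flags together, a typical full cleanup-and-rebuild cycle might look like the sketch below. ``devbuild.sh`` and its ``--platform`` option are assumed from the standard SRW build instructions, and ``<machine>`` stands in for the user's platform name.

.. code-block:: console

   # Sketch of a full cleanup-and-rebuild cycle (flags per the usage above;
   # devbuild.sh/--platform assumed from the standard SRW build instructions).
   ./devclean.sh --all                     # remove build artifacts, conda, and submodules
   ./manage_externals/checkout_externals   # re-fetch the external submodules
   ./devbuild.sh --platform=<machine>      # rebuild the SRW App executables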
=========================== diff --git a/doc/UsersGuide/source/Reference/Glossary.rst b/doc/UsersGuide/Reference/Glossary.rst similarity index 97% rename from doc/UsersGuide/source/Reference/Glossary.rst rename to doc/UsersGuide/Reference/Glossary.rst index dc1f810306..2612d4fbe8 100644 --- a/doc/UsersGuide/source/Reference/Glossary.rst +++ b/doc/UsersGuide/Reference/Glossary.rst @@ -90,7 +90,7 @@ Glossary `Earth System Modeling Framework `__. The ESMF defines itself as “a suite of software tools for developing high-performance, multi-component Earth science modeling applications.” ex-scripts - Scripting layer (contained in ``ufs-srweather-app/scripts/``) that should be called by a :term:`J-job ` for each workflow componentto run a specific task or sub-task in the workflow. The different scripting layers are described in detail in the `NCO Implementation Standards document `__ + Scripting layer (contained in ``ufs-srweather-app/scripts/``) that should be called by a :term:`J-job ` for each workflow component to run a specific task or sub-task in the workflow. The different scripting layers are described in detail in the :nco:`NCO Implementation Standards document ` FV3 The Finite-Volume Cubed-Sphere :term:`dynamical core` (dycore). Developed at NOAA's `Geophysical @@ -133,7 +133,7 @@ Glossary Initial conditions J-jobs - Scripting layer (contained in ``ufs-srweather-app/jobs/``) that should be directly called for each workflow component (either on the command line or by the workflow manager) to run a specific task in the workflow. The different scripting layers are described in detail in the `NCO Implementation Standards document `__ + Scripting layer (contained in ``ufs-srweather-app/jobs/``) that should be directly called for each workflow component (either on the command line or by the workflow manager) to run a specific task in the workflow. The different scripting layers are described in detail in the :nco:`NCO Implementation Standards document ` JEDI The Joint Effort for Data assimilation Integration (`JEDI `__) is a unified and versatile data assimilation (DA) system for Earth System Prediction. It aims to enable efficient research and accelerated transition from research to operations by providing a framework that takes into account all components of the Earth system in a consistent manner. The JEDI software package can run on a variety of platforms and for a variety of purposes, and it is designed to readily accommodate new atmospheric and oceanic models and new observation systems. The `JEDI User's Guide `__ contains extensive information on the software. @@ -227,7 +227,8 @@ Glossary A central location in which files (e.g., data, code, documentation) are stored and managed. RRFS - The `Rapid Refresh Forecast System `__ (RRFS) is NOAA's next-generation convection-allowing, rapidly-updated, ensemble-based data assimilation and forecasting system currently scheduled for operational implementation in 2024. It is designed to run forecasts on a 3-km :term:`CONUS` domain. + The `Rapid Refresh Forecast System `__ (RRFS) is NOAA's next-generation convection-allowing, rapidly-updated, ensemble-based data assimilation and forecasting system currently scheduled for operational implementation in 2024. It is designed to run forecasts on a 3-km :term:`CONUS` domain; see also `NOAA Rapid Refresh Forecast System (RRFS) `__. Experimental data is currently available from the `AWS S3 NOAA-RRFS `__ bucket for deterministic forecasts out to 60 hours at 00, 06, 12, and 18 UTC.
Additionally, hourly forecasts out to 18 hours may be available for more recent RRFS model runs; users need to verify that data exist for the needed dates. + SDF Suite Definition File. An external file containing information about the construction of a physics suite. It describes the schemes that are called, in which order they are called, whether they are subcycled, and whether they are assembled into groups to be called together. diff --git a/doc/UsersGuide/source/Reference/RocotoInfo.rst b/doc/UsersGuide/Reference/RocotoInfo.rst similarity index 100% rename from doc/UsersGuide/source/Reference/RocotoInfo.rst rename to doc/UsersGuide/Reference/RocotoInfo.rst diff --git a/doc/UsersGuide/source/Reference/index.rst b/doc/UsersGuide/Reference/index.rst similarity index 100% rename from doc/UsersGuide/source/Reference/index.rst rename to doc/UsersGuide/Reference/index.rst diff --git a/doc/UsersGuide/source/SSHIntro.rst b/doc/UsersGuide/SSHIntro.rst similarity index 100% rename from doc/UsersGuide/source/SSHIntro.rst rename to doc/UsersGuide/SSHIntro.rst diff --git a/doc/UsersGuide/index.rst b/doc/UsersGuide/index.rst new file mode 100644 index 0000000000..58c6fe6089 --- /dev/null +++ b/doc/UsersGuide/index.rst @@ -0,0 +1,10 @@ +User's Guide +============== + +.. toctree:: + :maxdepth: 3 + + BackgroundInfo/index + BuildingRunningTesting/index + CustomizingTheWorkflow/index + Reference/index diff --git a/doc/UsersGuide/source/_static/theme_overrides.css b/doc/UsersGuide/source/_static/theme_overrides.css deleted file mode 100644 index a9672944ba..0000000000 --- a/doc/UsersGuide/source/_static/theme_overrides.css +++ /dev/null @@ -1,24 +0,0 @@ -/* override table width restrictions */ -@media screen and (min-width: 767px) { - - .wy-table-responsive table td { - /* !important prevents the common CSS stylesheets from overriding - this as on RTD they are loaded after this stylesheet */ - white-space: normal !important; - } - - .wy-nav-content { - max-width: 100% !important; - } - - /* .wy-table-responsive { */ - /* overflow: visible !important; */ - /* } */ - -} - -/* Darken navbar blue background for contrast with logo */ -.wy-side-nav-search, .wy-nav-top { - background: #2779B0; - } - diff --git a/doc/UsersGuide/source/index.rst b/doc/UsersGuide/source/index.rst deleted file mode 100644 index 8f975b7cd1..0000000000 --- a/doc/UsersGuide/source/index.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. UFS SR Weather App Users Guide, created by - sphinx-quickstart on Tue Feb 12 08:48:32 2019. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -UFS Short-Range Weather App User's Guide (|version|) -===================================================== - -.. toctree:: - :numbered: - :maxdepth: 3 - - BackgroundInfo/index - BuildingRunningTesting/index - CustomizingTheWorkflow/index - Reference/index diff --git a/doc/UsersGuide/source/tables/fix_file_list.rst b/doc/UsersGuide/source/tables/fix_file_list.rst deleted file mode 100644 index a20bd39245..0000000000 --- a/doc/UsersGuide/source/tables/fix_file_list.rst +++ /dev/null @@ -1,821 +0,0 @@ -:orphan: - -.. _StaticFilesList: - - -Static Files for SRW App Release v2.1.0 -========================================== - -``fix_aer`` Files ---------------------- - -.. 
code-block:: console - - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m01.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m02.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m03.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m04.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m05.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m06.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m07.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m08.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m09.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m10.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m11.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2.aerclim.2003-2014.m12.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m01.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m02.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m03.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m04.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m05.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m06.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m07.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m08.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m09.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m10.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m11.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_aer/merra2C.aerclim.2003-2014.m12.nc - -``fix_am`` Files ---------------------- - -.. 
code-block:: console - - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CCN_ACTIVATE.BIN - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/cfs_ice1x1monclim19822001.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/cfs_oi2sst1x1monclim19822001.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/cfs_v2_soilmcpc.1x1.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CFSR.OISST.1982.2010.monthly.clim - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CFSR.OISST.1999.2012.monthly.clim.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CFSR.SEAICE.1982.2010.monthly.clim - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/CFSR.SEAICE.1982.2012.monthly.clim.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2monthlycyc.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/emcsfc_gland5min.grib2 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/emcsfc_snow_cover.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/emcsfc_snow_cover_climo.grib2 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/freezeH2O.dat - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/geo_em.d01.lat-lon.2.5m.HGT_M.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/geo_em.d01.nc_HRRR_AK - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/geo_em.d01.nc_HRRRX - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/geo_em.d01.nc_RAPX - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_1x1_paramlist - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_1x1_paramlist.anl - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_1x1_paramlist.f00 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeroinfo.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m01.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m02.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m03.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m04.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m05.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m06.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m07.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m08.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m09.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m10.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m11.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_aeropac3a.m12.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_albedo4.1x1.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_cldtune.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_climaeropac_global.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2con.l28.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2con.l42.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2con.l64.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1956.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1957.txt - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1958.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1959.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1960.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1961.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1962.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1963.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1964.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1965.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1966.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1967.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1968.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1969.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1970.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1971.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1972.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1973.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1974.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1975.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1976.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1977.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1978.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1979.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1980.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1981.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1982.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1983.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1984.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1985.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1986.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1987.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1988.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1989.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1990.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1991.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1992.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1993.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1994.txt - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1995.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1996.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1997.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1998.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_1999.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2000.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2001.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2002.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2003.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2004.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2005.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2006.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2007.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2008.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2009.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2010.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2011.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2012.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_2013.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2historicaldata_glob.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2monthlycyc1976_2006.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2monthlycyc1976_2007.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_co2monthlycyc1976_2009.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_divten.l28.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_divten.l42.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_divten.l64.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_emissivity_coefs.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t1148.2304.1152.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t1534.3072.1536.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t574.1152.576.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t670.1344.672.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t766.1536.768.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_gaussian_latitudes.t94.192.96.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_glacier.2x2.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_h2o_pltc.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hd_paramlist - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hd_paramlist.f00 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l128.txt - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l128C.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l150.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l28.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l42.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l60.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l64.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l64sl.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l65.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l65.txt_0.1hPa - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l91.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev.l98.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev3.l28.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev3.l42.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev3.l60.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_hyblev3.l64.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_iceclim.2x2.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_hflux.dat - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lflux.dat - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lte.150 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lte.360 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lte.540 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_coeff_lte.720 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_ggww_in1.par - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_ggww_in4.par - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_h2ort_kg7t.par - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_h2ovb_kg7t.par - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_idea_wei96.cofcnts - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_kplist.1d.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_kplist.hd.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_kplist.master.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t1534.3072.1536.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t382.768.384.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t670.1344.672.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t878.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_latitudes.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t1534.3072.1536.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t670.1344.672.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t878.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_longitudes.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t1148.2304.1152.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t126.384.190.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t1534.3072.1536.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t170.512.256.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t190.384.192.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t190.576.288.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t254.512.256.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t254.768.384.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t3070.6144.3072.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t382.1152.576.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t382.768.384.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t574.1152.576.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t574.1760.880.txt - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t62.192.94.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t670.1344.672.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t766.1536.768.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t878.1760.880.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t878.2640.1320.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t92.192.94.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_lonsperlat.t94.192.96.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_maskh.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_master-catchup_parmlist - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_maxice.2x2.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t1148.2304.1152.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t126.384.190.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t126.384.190.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t1534.3072.1536.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t1534.3072.1536.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t170.512.256.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t190.384.192.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t190.384.192.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t190.576.288.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t254.512.256.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t254.512.256.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t254.768.384.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t382.1152.576.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t382.768.384.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t382.768.384.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t574.1152.576.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t574.1152.576.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t574.1760.880.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t62.192.94.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t670.1344.672.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t670.1344.672.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t766.1536.768.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t878.1760.880.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t878.2640.1320.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t92.192.94.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mtnvar.t92.192.94.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t126.384.190.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t126.384.190.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t1534.3072.1536.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t1534.3072.1536.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t190.384.192.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t190.576.288.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t254.512.256.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t382.768.384.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t574.1152.576.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t62.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t670.1344.672.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t670.1344.672.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t766.1536.768.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t766.1536.768.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t878.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t92.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_mxsnoalb.uariz.t94.192.96.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_npoess_paramlist - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_o3clim.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_o3prdlos.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t126.384.190.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t126.384.190.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t1534.3072.1536.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t1534.3072.1536.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t1534.3072.1536.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t190.384.192.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t190.384.192.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t254.512.256.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t254.512.256.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t382.768.384.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t382.768.384.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t574.1152.576.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t574.1152.576.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t62.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t670.1344.672.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t670.1344.672.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t670.1344.672.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t766.1536.768.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t766.1536.768.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t878.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t92.192.94.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography.t92.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t1148.2304.1152.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t126.384.190.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t126.384.190.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t126.384.190.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t1534.3072.1536.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t1534.3072.1536.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t1534.3072.1536.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t170.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t190.384.192.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t190.384.192.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t190.384.192.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t190.576.288.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t254.512.256.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t254.512.256.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t254.512.256.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t254.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t382.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t382.768.384.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t382.768.384.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t382.768.384.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t574.1152.576.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t574.1152.576.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t574.1152.576.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t574.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t62.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t62.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t670.1344.672.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t670.1344.672.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t670.1344.672.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t766.1536.768.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t766.1536.768.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t878.1760.880.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t878.2640.1320.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t92.192.94.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t92.192.94.rg.f77 - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_uf.t92.192.94.rg.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_orography_0.5x0.5.dat - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_salclm.t1534.3072.1536.nc - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_sfc_emissivity_idx.txt - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_shdmax.0.144x0.144.grb - wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_shdmax.grb - wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_shdmin.0.144x0.144.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_shdmin.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_siglevel.l28.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_siglevel.l42.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_siglevel.l64.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t1148.2304.1152.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t126.384.190.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t126.384.190.rg.f77
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t126.384.190.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t1534.3072.1536.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t1534.3072.1536.rg.f77
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t1534.3072.1536.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t170.512.256.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t190.384.192.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t190.384.192.rg.f77
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t190.384.192.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t190.576.288.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t254.512.256.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t254.512.256.rg.f77
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t254.512.256.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t254.768.384.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t382.1152.576.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t382.768.384.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t382.768.384.rg.f77
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t382.768.384.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t574.1152.576.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t574.1152.576.rg.f77
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t574.1152.576.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t574.1760.880.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t62.192.94.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t62.192.94.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t670.1344.672.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t670.1344.672.rg.f77
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t670.1344.672.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t766.1536.768.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t766.1536.768.rg.f77
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t766.1536.768.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t878.1760.880.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t878.2640.1320.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t92.192.94.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t92.192.94.rg.f77
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slmask.t92.192.94.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slope.1x1.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_slptyp.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snoalb.1x1.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snoalb.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snoclim.1.875.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t1148.2304.1152.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t126.384.190.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t126.384.190.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t1534.3072.1536.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t1534.3072.1536.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t170.512.256.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t190.384.192.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t190.384.192.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t190.576.288.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t190.576.288.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t254.512.256.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t254.512.256.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t254.768.384.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t382.1152.576.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t382.768.384.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t382.768.384.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t574.1152.576.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t574.1152.576.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t574.1760.880.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t62.192.94.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t62.192.94.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t670.1344.672.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t670.1344.672.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t766.1536.768.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t766.1536.768.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t878.1760.880.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t878.2640.1320.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t92.192.94.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t92.192.94.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_snowfree_albedo.bosu.t94.192.96.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmcpc.1x1.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t1534.3072.1536.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t254.512.256.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t382.1152.576.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t382.768.384.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t574.1152.576.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t766.1536.768.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t92.192.94.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.statsgo.t94.192.96.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t1148.2304.1152.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t126.384.190.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t1534.3072.1536.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t170.512.256.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t190.384.192.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t190.576.288.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t190.576.288.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t254.512.256.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t254.768.384.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t382.1152.576.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t382.768.384.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t574.1152.576.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t574.1760.880.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t62.192.94.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t670.1344.672.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t766.1536.768.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t878.1760.880.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t878.2640.1320.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soilmgldas.t92.192.94.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.1x1.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t1148.2304.1152.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t126.384.190.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t126.384.190.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t1534.3072.1536.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t1534.3072.1536.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t170.512.256.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t190.384.192.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t190.384.192.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t190.576.288.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t190.576.288.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t254.512.256.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t254.512.256.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t254.768.384.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t382.1152.576.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t382.768.384.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t382.768.384.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t574.1152.576.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t574.1152.576.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t574.1760.880.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t62.192.94.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t62.192.94.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t670.1344.672.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t670.1344.672.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t766.1536.768.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t766.1536.768.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t878.1760.880.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t878.2640.1320.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t92.192.94.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t92.192.94.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_soiltype.statsgo.t94.192.96.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_cmip_an.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_cmip_mn.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_noaa_a0.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_noaa_an.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_noaa_an.txt_v2011
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstant_noaa_an.txt_v2019
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_solarconstantdata.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_spectral_coefs.f77
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_sstclim.2x2.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_tbthe.f77
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_tg3clim.2.6x1.5.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_transmittance_coefs.f77
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vars.l28.f77
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vars.l42.f77
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vars.l64.f77
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegfrac.0.144.decpercent.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegfrac.1x1.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.1x1.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t1148.2304.1152.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t126.384.190.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t126.384.190.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t1534.3072.1536.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t1534.3072.1536.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t170.512.256.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t190.384.192.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t190.384.192.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t190.576.288.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t190.576.288.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t254.512.256.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t254.512.256.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t254.768.384.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t382.1152.576.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t382.768.384.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t382.768.384.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t574.1152.576.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t574.1152.576.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t574.1760.880.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t62.192.94.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t62.192.94.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t670.1344.672.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t670.1344.672.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t766.1536.768.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t766.1536.768.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t878.1760.880.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t878.2640.1320.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t92.192.94.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t92.192.94.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_vegtype.igbp.t94.192.96.rg.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1850-1859.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1860-1869.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1870-1879.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1880-1889.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1890-1899.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1900-1909.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1910-1919.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1920-1929.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1930-1939.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1940-1949.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1950-1959.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1960-1969.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1970-1979.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1980-1989.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_volcanic_aerosols_1990-1999.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/global_zorclim.1x1.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/HGT.Beljaars_filtered.lat-lon.30s_res.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/latlon_grid3.32769.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/ozone.clim
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/qr_acr_qg.dat
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/qr_acr_qgV2.dat
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/qr_acr_qs.dat
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/qr_acr_qsV2.dat
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-cloud-optics-coeffs-lw.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-cloud-optics-coeffs-sw.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-data-lw-g256-2018-12-04.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-data-sw-g224-2018-12-04.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-lw-prototype-g128-210413.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/rrtmgp-sw-prototype-g131-210413.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/RTGSST.1982.2012.monthly.clim.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/seaice_newland.grb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_fildef.vit
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_slmask.t126.gaussian
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_stmnames
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_stmnames_old
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_stmnames_old1
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/syndat_stmnames_old2
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/Thompson_MP_MONTHLY_CLIMO.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/ugwp_limb_tau.nc
-
-
-``fix_am/co2dat_4a/`` Files:
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-.. code-block:: console
-
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1956.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1957.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1958.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1959.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1960.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1961.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1962.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1963.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1964.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1965.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1966.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1967.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1968.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1969.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1970.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1971.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1972.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1973.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1974.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1975.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1976.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1977.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1978.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1979.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1980.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1981.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1982.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1983.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1984.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1985.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1986.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1987.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1988.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1989.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1990.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1991.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1992.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1993.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1994.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1995.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1996.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1997.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1998.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_1999.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2000.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2001.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2002.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2003.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2004.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2005.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2006.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2007.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2008.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt_proj
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2009.txt_proj_u
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt_proj
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2010.txt_proj_u
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt_proj
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2011.txt_proj_u
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt_proj
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2012.txt_proj_u
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt_proj
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2013.txt_proj_u
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt_proj
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2014.txt_proj_u
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt_proj
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2015.txt_proj_u
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt_proj
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2016.txt_proj_u
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt_proj
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2017.txt_proj_u
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt_proj
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2018.txt_proj_u
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt_proj
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2019.txt_proj_u
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt_proj
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt_proj_u
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2021.txt_proj
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2021.txt_proj_u
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_2022.txt_proj
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2historicaldata_glob.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2monthlycyc1976_2006.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/global_co2monthlycyc1976_2009.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/co2dat_4a/MEMO
-
-
-``fix_am/fix_co2_proj`` Files:
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-.. code-block:: console
-
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2009.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2010.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2011.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2012.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2013.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2014.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2015.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2016.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2017.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2018.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2019.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2020.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2021.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_proj/global_co2historicaldata_2022.txt
-
-
-``fix_am/fix_co2_update`` Files:
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-.. code-block:: console
-
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2009.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2010.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2011.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2012.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2013.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2014.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2015.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2016.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2017.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2018.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2019.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2020.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_am/fix_co2_update/global_co2historicaldata_2021.txt
-
-
-``fix_lut`` Files
----------------------
-
-.. code-block:: console
-
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_BC.v1_3.dat
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_DU.v15_3.dat
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_DU.v15_3.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_OC.v1_3.dat
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_SS.v3_3.dat
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_lut/optics_SU.v1_3.dat
-
-
-``fix_orog`` Files
----------------------
-
-.. code-block:: console
-
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/clmgrb
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/clmgrb.index
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/convert.f90
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gmted2010.30sec.flt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gmted2010.30sec.int
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gmted2010.30sec.flt.ctl
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gmted2010.30sec.int.ctl
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/thirty.second.antarctic.new.bin
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/GlobalLakeDepth.dat
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/GlobalLakeDepth.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/GlobalLakeStatus.dat
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/GlobalLakeStatus.txt
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gtopo30_gg.fine
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/gtopo30_gg.fine.nh
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/landcover30.fixed
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/makefile
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/run.lsf
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/TOP8M_avg.20I4.asc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/TOP8M_max.20I4.asc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_orog/TOP8M_slm.80I1.asc
-
-
-
-``fix_sfc_climo`` Files
--------------------------
-
-.. code-block:: console
-
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/facsf.1.0.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.igbp.0.03.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/leaf_area_index.0.05.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.igbp.0.05.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/maximum_snow_albedo.0.05.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.igbp.conus.0.01.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/slope_type.1.0.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.modis.igbp.0.03.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/snowfree_albedo.4comp.0.05.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.modis.igbp.0.05.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/soil_type.statsgo.0.03.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.modis.igbp.conus.0.01.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/soil_type.statsgo.0.05.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.03.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/soil_type.statsgo.conus.0.01.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.05.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/substrate_temperature.1.0.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.1.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/substrate_temperature.2.6x1.5.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_type.viirs.igbp.conus.0.01.nc
-   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/fix/fix_sfc_climo/vegetation_greenness.0.144.nc
-
-
diff --git a/doc/UsersGuide/source/_static/custom.css b/doc/_static/custom.css
similarity index 100%
rename from doc/UsersGuide/source/_static/custom.css
rename to doc/_static/custom.css
diff --git a/doc/_static/theme_overrides.css b/doc/_static/theme_overrides.css
new file mode 100644
index 0000000000..a4e5cab82f
--- /dev/null
+++ b/doc/_static/theme_overrides.css
@@ -0,0 +1,18 @@
+.wy-table-responsive table td {
+    /* !important prevents the common CSS stylesheets from overriding
+       this as on RTD they are loaded after this stylesheet */
+    white-space: normal !important;
+}
+
+.wy-nav-content {
+    max-width: 100% !important;
+}
+
+.wy-table-responsive {
+    overflow: visible !important;
+}
+
+/* Darken navbar background for contrast with logo */
+.wy-side-nav-search, .wy-nav-top {
+    background: #2779B0;
+}
diff --git a/doc/UsersGuide/source/_templates/.gitignore b/doc/_templates/.gitignore
similarity index 100%
rename from doc/UsersGuide/source/_templates/.gitignore
rename to doc/_templates/.gitignore
diff --git a/doc/UsersGuide/source/conf.py b/doc/conf.py
similarity index 85%
rename from doc/UsersGuide/source/conf.py
rename to doc/conf.py
index cc348f18da..6b0f461ba8 100644
--- a/doc/UsersGuide/source/conf.py
+++ b/doc/conf.py
@@ -14,7 +14,8 @@
 #
 import os
 import sys
-sys.path.insert(0, os.path.abspath('.'))
+import sphinx
+from sphinx.util import logging
@@ -32,37 +33,10 @@
 numfig = True
-# Avoid a 403 Forbidden error when accessing certain links (e.g., noaa.gov)
-user_agent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36"
-
-# Ignore working links that cause a linkcheck 403 error.
-linkcheck_ignore = [r'https://www\.intel\.com/content/www/us/en/docs/cpp\-compiler/developer\-guide\-reference/2021\-10/thread\-affinity\-interface\.html',
-                    r'https://www\.intel\.com/content/www/us/en/developer/tools/oneapi/hpc\-toolkit\-download\.html',
-                    ]
-
-# Ignore anchor tags for SRW App data bucket. Shows Not Found even when they exist.
-linkcheck_anchors_ignore = [r"current_srw_release_data/",
-                            r"input_model_data/.*",
-                            r"fix.*",
-                            r"sample_cases/.*",
-                            ]
-
-linkcheck_allowed_redirects = {r"https://github\.com/ufs-community/ufs-srweather-app/wiki/.*": r"https://raw\.githubusercontent\.com/wiki/ufs-community/ufs-srweather-app/.*",
-                               r"https://github\.com/ufs-community/ufs-srweather-app/issues/new/choose": r"https://github\.com/login",
-                               r"https://doi\.org/.*/zenodo\..*": r"https://zenodo\.org/records/.*",
-                               r"https://doi\.org/.*": r"https://gmd\.copernicus\.org/.*",
-                               r"https://rdhpcs\-common\-docs\.rdhpcs\.noaa\.gov/wiki/index\.php/Transferring\_Data":
-                               r"https://sso\.noaa\.gov\:443/openam/SSORedirect/metaAlias/noaa\-online/idp\?SAMLRequest\=.*"
-                               }

 # -- General configuration ---------------------------------------------------

-# If your documentation needs a minimal Sphinx version, state it here.
-# needs_sphinx = '1.0'
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
+# Sphinx extension module names:
 extensions = [
     'sphinx_rtd_theme',
     'sphinx.ext.autodoc',
@@ -78,7 +52,6 @@ ]
 bibtex_bibfiles = ['references.bib']
-#bibtex_bibfiles = ['refs.bib']

 # Add any paths that contain templates here, relative to this directory.
 templates_path = ['_templates']
@@ -99,7 +72,8 @@
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = []
+exclude_patterns = ['_build',
+                    '.DS_Store',]

 # The name of the Pygments (syntax highlighting) style to use.
 pygments_style = 'sphinx'
@@ -117,6 +91,37 @@
 .. |data| replace:: develop
 """
+
+# Linkcheck options
+
+# Avoid a 403 Forbidden error when accessing certain links (e.g., noaa.gov)
+user_agent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36"
+
+# Ignore working links that cause a linkcheck 403 error.
+linkcheck_ignore = [r'https://www\.intel\.com/content/www/us/en/docs/cpp\-compiler/developer\-guide\-reference/2021\-10/thread\-affinity\-interface\.html',
+                    r'https://www\.intel\.com/content/www/us/en/developer/tools/oneapi/hpc\-toolkit\-download\.html',
+                    #r'https://glossary.ametsoc.org/.*',
+                    ]
+
+# Ignore anchor tags for SRW App data bucket. Shows Not Found even when they exist.
+linkcheck_anchors_ignore = [r"current_srw_release_data/",
+                            r"input_model_data/.*",
+                            r"fix.*",
+                            r"sample_cases/.*",
+                            ]
+
+linkcheck_allowed_redirects = {r"https://github\.com/ufs-community/ufs-srweather-app/wiki/.*":
+                               r"https://raw\.githubusercontent\.com/wiki/ufs-community/ufs-srweather-app/.*",
+                               r"https://github\.com/ufs-community/ufs-srweather-app/issues/new/choose":
+                               r"https://github\.com/login",
+                               r"https://doi\.org/.*/zenodo\..*": r"https://zenodo\.org/records/.*",
+                               r"https://doi\.org/.*": r"https://gmd\.copernicus\.org/.*",
+                               r"https://rdhpcs\-common\-docs\.rdhpcs\.noaa\.gov/wiki/index\.php/Transferring\_Data":
+                               r"https://sso\.noaa\.gov\:443/openam/SSORedirect/metaAlias/noaa\-online/idp\?SAMLRequest\=.*",
+                               r"https://github\.com/ufs-community/ufs\-srweather\-app/issues/.*":
+                               r"https://github\.com/login\?return\_to\=https.*",
+                               }
+
+
 # -- Options for HTML output -------------------------------------------------

 # The theme to use for HTML and HTML Help pages.  See the documentation for
@@ -124,6 +129,7 @@
 #
 html_theme = 'sphinx_rtd_theme'
 html_theme_path = ["_themes", ]
+html_logo= "https://github.com/ufs-community/ufs/wiki/images/ufs-epic-logo.png"

 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
@@ -132,7 +138,7 @@
 # html_theme_options = {}
 html_theme_options = {
     "body_max_width": "none",
-    "navigation_depth": 6,
+    "navigation_depth": 8,
 }

 # Add any paths that contain custom static files (such as style sheets) here,
@@ -191,7 +197,7 @@ def setup(app):
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'UFS-SRWeatherApp.tex', 'UFS Short-Range Weather App Users Guide',
+    (master_doc, 'UFS-SRWeatherApp.tex', 'UFS Short-Range Weather App Documentation',
     ' ', 'manual'),
 ]

@@ -201,7 +207,7 @@ def setup(app):
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-    (master_doc, 'UFS-SRWeatherApp', 'UFS Short-Range Weather App Users Guide',
+    (master_doc, 'UFS-SRWeatherApp', 'UFS Short-Range Weather App Documentation',
     [author], 1)
 ]

@@ -253,16 +259,16 @@
     'ccpp-techdoc': ('https://ccpp-techdoc.readthedocs.io/en/ufs_srw_app_v2.2.0/', None),
     'stochphys': ('https://stochastic-physics.readthedocs.io/en/latest/', None),
     'srw_v2.2.0': ('https://ufs-srweather-app.readthedocs.io/en/release-public-v2.2.0/', None),
-    'uw': ('https://uwtools.readthedocs.io/en/main', None),
 }

 # -- Options for extlinks extension ---------------------------------------

 extlinks_detect_hardcoded_links = True
-extlinks = {'srw-wiki': ('https://github.com/ufs-community/ufs-srweather-app/wiki/%s','%s'),
-            }
-
-# -- Options for todo extension ----------------------------------------------
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
+extlinks = {'github-docs': ('https://docs.github.com/en/%s', '%s'),
+            'nco': ('https://www.nco.ncep.noaa.gov/idsb/implementation_standards/%s', '%s'),
+            "rst": ("https://www.sphinx-doc.org/en/master/usage/restructuredtext/%s", "%s"),
+            "rtd": ("https://readthedocs.org/projects/ufs-srweather-app/%s", "%s"),
+            'srw-repo': ('https://github.com/ufs-community/ufs-srweather-app/%s', '%s'),
+            'srw-wiki': ('https://github.com/ufs-community/ufs-srweather-app/wiki/%s','%s'),
+            'uw': ('https://uwtools.readthedocs.io/en/main/%s', '%s'),
+            }
\ No newline at end of file
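The ``extlinks`` table above maps short roles to URL patterns: Sphinx substitutes the role target for ``%s``, so in an RST file ``:srw-wiki:`Getting-Started``` (a hypothetical page name) would render as a link to ``https://github.com/ufs-community/ufs-srweather-app/wiki/Getting-Started``. The relocated linkcheck settings, in turn, only take effect when the linkcheck builder runs. A sketch of a quick local check, assuming the standard Sphinx ``Makefile`` targets alongside the ``make.bat`` kept in ``doc/``:

.. code-block:: console

   # Run Sphinx's link checker against the docs; the output directory
   # (_build/linkcheck) is illustrative.
   cd doc
   make linkcheck
   # or invoke the builder directly:
   sphinx-build -b linkcheck . _build/linkcheck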
diff --git a/doc/index.rst b/doc/index.rst
new file mode 100644
index 0000000000..c8cf2b32fc
--- /dev/null
+++ b/doc/index.rst
@@ -0,0 +1,9 @@
+UFS Short-Range Weather App Documentation (|version|)
+=====================================================
+
+.. toctree::
+   :numbered:
+   :maxdepth: 3
+
+   UsersGuide/index
+   ContribGuide/index
diff --git a/doc/UsersGuide/make.bat b/doc/make.bat
similarity index 100%
rename from doc/UsersGuide/make.bat
rename to doc/make.bat
diff --git a/doc/UsersGuide/source/references.bib b/doc/references.bib
similarity index 100%
rename from doc/UsersGuide/source/references.bib
rename to doc/references.bib
diff --git a/doc/UsersGuide/requirements.in b/doc/requirements.in
similarity index 100%
rename from doc/UsersGuide/requirements.in
rename to doc/requirements.in
diff --git a/doc/UsersGuide/requirements.txt b/doc/requirements.txt
similarity index 89%
rename from doc/UsersGuide/requirements.txt
rename to doc/requirements.txt
index 60c67635ea..e6d38a4eb8 100644
--- a/doc/UsersGuide/requirements.txt
+++ b/doc/requirements.txt
@@ -2,27 +2,27 @@
 # This file is autogenerated by pip-compile with Python 3.11
 # by the following command:
 #
-#    pip-compile requirements.in
+#    pip-compile --strip-extras requirements.in
 #
 alabaster==0.7.16
     # via sphinx
 babel==2.14.0
     # via sphinx
-certifi==2024.2.2
+certifi==2024.7.4
     # via requests
 charset-normalizer==3.3.2
     # via requests
-docutils==0.19
+docutils==0.20.1
     # via
     #   pybtex-docutils
    #   sphinx
     #   sphinx-rtd-theme
     #   sphinxcontrib-bibtex
-idna==3.6
+idna==3.7
     # via requests
 imagesize==1.4.1
     # via sphinx
-jinja2==3.1.3
+jinja2==3.1.4
     # via sphinx
 latexcodec==2.0.1
     # via pybtex
@@ -40,7 +40,7 @@ pygments==2.17.2
     # via sphinx
 pyyaml==6.0.1
     # via pybtex
-requests==2.31.0
+requests==2.32.2
     # via sphinx
 six==1.16.0
     # via
@@ -58,7 +58,7 @@ sphinx-rtd-theme==2.0.0
     # via -r requirements.in
 sphinxcontrib-applehelp==1.0.8
     # via sphinx
-sphinxcontrib-bibtex==2.5.0
+sphinxcontrib-bibtex==2.6.2
     # via -r requirements.in
 sphinxcontrib-devhelp==1.0.6
     # via sphinx
@@ -72,5 +72,5 @@ sphinxcontrib-qthelp==1.0.7
     # via sphinx
 sphinxcontrib-serializinghtml==1.1.10
     # via sphinx
-urllib3==2.2.0
+urllib3==2.2.2
     # via requests
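As the regenerated header notes, ``requirements.txt`` is compiled from ``requirements.in`` rather than edited by hand. A sketch of the regeneration step (``pip-tools`` supplies ``pip-compile``; run it from the directory containing ``requirements.in``):

.. code-block:: console

   # Install pip-tools, then re-pin requirements.txt from requirements.in
   # using the same command recorded in the file header.
   pip install pip-tools
   pip-compile --strip-extras requirements.in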
diff --git a/doc/UsersGuide/source/tables/SRW_NATLEV_table.csv b/doc/tables/SRW_NATLEV_table.csv
similarity index 100%
rename from doc/UsersGuide/source/tables/SRW_NATLEV_table.csv
rename to doc/tables/SRW_NATLEV_table.csv
diff --git a/doc/UsersGuide/source/tables/SRW_NATLEV_table.rst b/doc/tables/SRW_NATLEV_table.rst
similarity index 100%
rename from doc/UsersGuide/source/tables/SRW_NATLEV_table.rst
rename to doc/tables/SRW_NATLEV_table.rst
diff --git a/doc/UsersGuide/source/tables/SRW_PRSLEV_table.csv b/doc/tables/SRW_PRSLEV_table.csv
similarity index 100%
rename from doc/UsersGuide/source/tables/SRW_PRSLEV_table.csv
rename to doc/tables/SRW_PRSLEV_table.csv
diff --git a/doc/UsersGuide/source/tables/SRW_PRSLEV_table.rst b/doc/tables/SRW_PRSLEV_table.rst
similarity index 100%
rename from doc/UsersGuide/source/tables/SRW_PRSLEV_table.rst
rename to doc/tables/SRW_PRSLEV_table.rst
diff --git a/doc/UsersGuide/source/tables/Tests.csv b/doc/tables/Tests.csv
similarity index 100%
rename from doc/UsersGuide/source/tables/Tests.csv
rename to doc/tables/Tests.csv
diff --git a/doc/UsersGuide/source/tables/Tests.rst b/doc/tables/Tests.rst
similarity index 100%
rename from doc/UsersGuide/source/tables/Tests.rst
rename to doc/tables/Tests.rst
diff --git a/doc/tables/code-managers.csv b/doc/tables/code-managers.csv
new file mode 100644
index 0000000000..ec665b0fa7
--- /dev/null
+++ b/doc/tables/code-managers.csv
@@ -0,0 +1,21 @@
+Affiliation;Code Manager;Areas of Expertise
+EPIC;**Michael Lueken (@MichaelLueken)**;Lead SRW App code manager
+GSL;Daniel Abdi (@danielabdi-noaa);Workflow generation, testing RRFS on the cloud, environment modules
+GSL;Jeff Beck (@JeffBeck-NOAA);SRW App configuration/workflow, code management, meteorological evaluation
+EMC;Ben Blake (@BenjaminBlake-NOAA);Output visualization, Rocoto
+EMC;Brian Curtis (@BrianCurtis-NOAA);Operational air quality modeling (Online-CMAQ), code management
+GSL;Christopher Harrop (@christopherwharrop-noaa);Rocoto, code management, and testing
+GSL;Christina Holt (@christinaholtNOAA);Workflow, conda environment support, testing, and code management
+EPIC;Chan-Hoo Jeon (@chan-hoo);Air quality modeling (Online-CMAQ), NCO Implementation Standards, Workflow
+EPIC;Jong Kim (@jkbk2004);UFS Weather Model configuration, forecast sensitivity analysis, data assimilation
+NCAR;Mike Kavulich (@mkavulich);Workflow, CCPP/physics, code management, WE2E testing, verification
+GSL;Gerard Ketefian (@gsketefian);Verification/METplus tasks, Jinja templates, and workflow scripts
+NCAR;Will Mayfield (@willmayfield);Verification/METplus tasks
+GSL;Linlin Pan (@panll);Workflow, CCPP/physics, and verification
+EPIC;Natalie Perlin (@natalie-perlin);Generic Linux/Mac installations, hpc-stack/spack-stack
+EPIC;Gillian Petro (@gspetro-NOAA);Documentation, User Support
+EPIC;Mark Potts (@mark-a-potts);HPC systems
+EPIC;Edward Snyder (@EdwardSnyder-NOAA);WE2E testing, input data
+GLERL;David Wright (@dmwright526);FVCOM integration, output visualization, preprocessing tasks
+EPIC;Ratko Vasic (@RatkoVasic-NOAA);Workflow, testing, and spack-stack maintenance
+NSSL;Yunheng Wang (@ywangwof);HPC systems, code management, and regional workflow (especially on Stampede, Jet and NSSL computers)
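The new ``fix_file_list.rst`` below enumerates every static file as its own ``wget`` command. For bulk retrieval it can be easier to collect the URLs into a file and let ``wget`` read them in one invocation; a sketch, where ``urls.txt`` is a hypothetical scratch file holding one URL per line:

.. code-block:: console

   # Pull the URL column out of the documented wget commands, then
   # download everything in a single wget call (-i reads URLs from a file).
   grep -o 'https://noaa-ufs-srw-pds[^ ]*' fix_file_list.rst > urls.txt
   wget -i urls.txt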
diff --git a/doc/tables/fix_file_list.rst b/doc/tables/fix_file_list.rst
new file mode 100644
index 0000000000..628c124bc3
--- /dev/null
+++ b/doc/tables/fix_file_list.rst
@@ -0,0 +1,821 @@
+:orphan:
+
+.. _StaticFilesList:
+
+
+Static Files for SRW App Release v2.1.0
+==========================================
+
+``fix_aer`` Files
+---------------------
+
+.. code-block:: console
+
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m01.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m02.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m03.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m04.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m05.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m06.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m07.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m08.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m09.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m10.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m11.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2.aerclim.2003-2014.m12.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m01.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m02.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m03.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m04.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m05.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m06.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m07.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m08.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m09.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m10.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m11.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_aer/merra2C.aerclim.2003-2014.m12.nc
+
+``fix_am`` Files
+---------------------
+
+.. code-block:: console
+
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CCN_ACTIVATE.BIN
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/cfs_ice1x1monclim19822001.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/cfs_oi2sst1x1monclim19822001.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/cfs_v2_soilmcpc.1x1.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CFSR.OISST.1982.2010.monthly.clim
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CFSR.OISST.1999.2012.monthly.clim.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CFSR.SEAICE.1982.2010.monthly.clim
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/CFSR.SEAICE.1982.2012.monthly.clim.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2monthlycyc.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/emcsfc_gland5min.grib2
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/emcsfc_snow_cover.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/emcsfc_snow_cover_climo.grib2
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/freezeH2O.dat
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/geo_em.d01.lat-lon.2.5m.HGT_M.nc
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/geo_em.d01.nc_HRRR_AK
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/geo_em.d01.nc_HRRRX
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/geo_em.d01.nc_RAPX
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_1x1_paramlist
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_1x1_paramlist.anl
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_1x1_paramlist.f00
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeroinfo.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m01.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m02.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m03.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m04.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m05.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m06.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m07.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m08.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m09.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m10.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m11.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_aeropac3a.m12.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_albedo4.1x1.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_cldtune.f77
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_climaeropac_global.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2con.l28.f77
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2con.l42.f77
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2con.l64.f77
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1956.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1957.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1958.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1959.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1960.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1961.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1962.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1963.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1964.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1965.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1966.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1967.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1968.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1969.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1970.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1971.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1972.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1973.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1974.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1975.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1976.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1977.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1978.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1979.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1980.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1981.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1982.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1983.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1984.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1985.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1986.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1987.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1988.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1989.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1990.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1991.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1992.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1993.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1994.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1995.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1996.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1997.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1998.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_1999.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2000.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2001.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2002.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2003.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2004.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2005.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2006.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2007.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2008.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2009.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2010.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2011.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2012.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_2013.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2historicaldata_glob.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2monthlycyc1976_2006.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2monthlycyc1976_2007.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_co2monthlycyc1976_2009.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_divten.l28.f77
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_divten.l42.f77
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_divten.l64.f77
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_emissivity_coefs.f77
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t1148.2304.1152.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t1534.3072.1536.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t574.1152.576.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t670.1344.672.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t766.1536.768.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_gaussian_latitudes.t94.192.96.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_glacier.2x2.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_h2o_pltc.f77
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hd_paramlist
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hd_paramlist.f00
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l128.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l128C.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l150.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l28.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l42.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l60.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l64.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l64sl.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l65.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l65.txt_0.1hPa
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l91.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev.l98.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev3.l28.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev3.l42.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev3.l60.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_hyblev3.l64.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_iceclim.2x2.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_hflux.dat
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lflux.dat
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lte.150
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lte.360
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lte.540
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_coeff_lte.720
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_ggww_in1.par
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_ggww_in4.par
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_h2ort_kg7t.par
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_h2ovb_kg7t.par
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_idea_wei96.cofcnts
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_kplist.1d.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_kplist.hd.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_kplist.master.txt
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t1148.2304.1152.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t126.384.190.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t1534.3072.1536.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t170.512.256.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t190.384.192.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t190.576.288.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t254.512.256.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t254.768.384.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t382.1152.576.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t382.768.384.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t574.1152.576.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t574.1760.880.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t62.192.94.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t670.1344.672.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t878.1760.880.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t878.2640.1320.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_latitudes.t92.192.94.grb
+   wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t1148.2304.1152.grb
+   wget
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t126.384.190.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t1534.3072.1536.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t170.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t190.384.192.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t190.576.288.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t254.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t254.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t382.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t382.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t574.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t574.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t62.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t670.1344.672.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t878.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t878.2640.1320.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_longitudes.t92.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t1148.2304.1152.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t126.384.190.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t1534.3072.1536.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t170.512.256.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t190.384.192.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t190.576.288.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t254.512.256.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t254.768.384.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t3070.6144.3072.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t382.1152.576.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t382.768.384.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t574.1152.576.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t574.1760.880.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t62.192.94.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t670.1344.672.txt + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t766.1536.768.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t878.1760.880.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t878.2640.1320.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t92.192.94.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_lonsperlat.t94.192.96.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_maskh.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_master-catchup_parmlist + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_maxice.2x2.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t1148.2304.1152.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t126.384.190.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t126.384.190.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t1534.3072.1536.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t1534.3072.1536.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t170.512.256.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t190.384.192.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t190.384.192.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t190.576.288.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t254.512.256.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t254.512.256.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t254.768.384.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t382.1152.576.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t382.768.384.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t382.768.384.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t574.1152.576.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t574.1152.576.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t574.1760.880.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t62.192.94.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t670.1344.672.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t670.1344.672.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t766.1536.768.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t878.1760.880.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t878.2640.1320.f77 + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t92.192.94.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mtnvar.t92.192.94.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t1148.2304.1152.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t126.384.190.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t126.384.190.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t1534.3072.1536.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t1534.3072.1536.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t170.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t190.384.192.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t190.384.192.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t190.576.288.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t190.576.288.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t254.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t254.512.256.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t254.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t382.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t382.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t382.768.384.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t574.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t574.1152.576.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t574.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t62.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t62.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t670.1344.672.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t670.1344.672.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t766.1536.768.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t766.1536.768.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t878.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t878.2640.1320.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t92.192.94.grb + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t92.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_mxsnoalb.uariz.t94.192.96.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_npoess_paramlist + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_o3clim.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_o3prdlos.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t1148.2304.1152.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t126.384.190.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t126.384.190.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t126.384.190.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t1534.3072.1536.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t1534.3072.1536.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t1534.3072.1536.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t170.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t190.384.192.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t190.384.192.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t190.384.192.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t190.576.288.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t254.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t254.512.256.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t254.512.256.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t254.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t382.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t382.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t382.768.384.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t382.768.384.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t574.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t574.1152.576.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t574.1152.576.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t574.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t62.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t62.192.94.rg.grb + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t670.1344.672.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t670.1344.672.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t670.1344.672.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t766.1536.768.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t766.1536.768.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t878.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t878.2640.1320.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t92.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t92.192.94.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography.t92.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t1148.2304.1152.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t126.384.190.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t126.384.190.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t126.384.190.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t1534.3072.1536.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t1534.3072.1536.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t1534.3072.1536.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t170.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t190.384.192.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t190.384.192.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t190.384.192.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t190.576.288.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t254.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t254.512.256.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t254.512.256.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t254.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t382.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t382.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t382.768.384.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t382.768.384.rg.grb + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t574.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t574.1152.576.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t574.1152.576.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t574.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t62.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t62.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t670.1344.672.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t670.1344.672.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t670.1344.672.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t766.1536.768.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t766.1536.768.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t878.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t878.2640.1320.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t92.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t92.192.94.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_uf.t92.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_orography_0.5x0.5.dat + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_salclm.t1534.3072.1536.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_sfc_emissivity_idx.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_shdmax.0.144x0.144.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_shdmax.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_shdmin.0.144x0.144.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_shdmin.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_siglevel.l28.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_siglevel.l42.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_siglevel.l64.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t1148.2304.1152.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t126.384.190.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t126.384.190.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t126.384.190.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t1534.3072.1536.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t1534.3072.1536.rg.f77 + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t1534.3072.1536.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t170.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t190.384.192.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t190.384.192.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t190.384.192.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t190.576.288.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t254.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t254.512.256.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t254.512.256.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t254.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t382.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t382.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t382.768.384.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t382.768.384.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t574.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t574.1152.576.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t574.1152.576.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t574.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t62.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t62.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t670.1344.672.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t670.1344.672.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t670.1344.672.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t766.1536.768.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t766.1536.768.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t766.1536.768.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t878.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t878.2640.1320.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t92.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t92.192.94.rg.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slmask.t92.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slope.1x1.grb + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_slptyp.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snoalb.1x1.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snoalb.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snoclim.1.875.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t1148.2304.1152.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t126.384.190.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t126.384.190.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t1534.3072.1536.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t1534.3072.1536.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t170.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t190.384.192.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t190.384.192.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t190.576.288.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t190.576.288.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t254.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t254.512.256.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t254.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t382.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t382.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t382.768.384.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t574.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t574.1152.576.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t574.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t62.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t62.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t670.1344.672.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t670.1344.672.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t766.1536.768.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t766.1536.768.rg.grb + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t878.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t878.2640.1320.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t92.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t92.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_snowfree_albedo.bosu.t94.192.96.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmcpc.1x1.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t1534.3072.1536.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t254.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t382.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t382.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t574.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t766.1536.768.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t92.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.statsgo.t94.192.96.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t1148.2304.1152.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t126.384.190.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t1534.3072.1536.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t170.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t190.384.192.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t190.576.288.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t190.576.288.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t254.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t254.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t382.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t382.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t574.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t574.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t62.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t670.1344.672.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t766.1536.768.grb + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t878.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t878.2640.1320.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soilmgldas.t92.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.1x1.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t1148.2304.1152.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t126.384.190.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t126.384.190.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t1534.3072.1536.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t1534.3072.1536.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t170.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t190.384.192.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t190.384.192.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t190.576.288.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t190.576.288.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t254.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t254.512.256.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t254.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t382.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t382.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t382.768.384.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t574.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t574.1152.576.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t574.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t62.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t62.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t670.1344.672.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t670.1344.672.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t766.1536.768.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t766.1536.768.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t878.1760.880.grb + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t878.2640.1320.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t92.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t92.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_soiltype.statsgo.t94.192.96.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_cmip_an.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_cmip_mn.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_noaa_a0.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_noaa_an.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_noaa_an.txt_v2011 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstant_noaa_an.txt_v2019 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_solarconstantdata.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_spectral_coefs.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_sstclim.2x2.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_tbthe.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_tg3clim.2.6x1.5.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_transmittance_coefs.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vars.l28.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vars.l42.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vars.l64.f77 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegfrac.0.144.decpercent.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegfrac.1x1.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.1x1.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t1148.2304.1152.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t126.384.190.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t126.384.190.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t1534.3072.1536.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t1534.3072.1536.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t170.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t190.384.192.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t190.384.192.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t190.576.288.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t190.576.288.rg.grb + wget 
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t254.512.256.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t254.512.256.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t254.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t382.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t382.768.384.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t382.768.384.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t574.1152.576.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t574.1152.576.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t574.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t62.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t62.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t670.1344.672.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t670.1344.672.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t766.1536.768.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t766.1536.768.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t878.1760.880.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t878.2640.1320.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t92.192.94.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t92.192.94.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_vegtype.igbp.t94.192.96.rg.grb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1850-1859.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1860-1869.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1870-1879.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1880-1889.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1890-1899.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1900-1909.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1910-1919.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1920-1929.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1930-1939.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/global_volcanic_aerosols_1940-1949.txt + wget 
+    wget ${base}/global_zorclim.1x1.grb
+    wget ${base}/HGT.Beljaars_filtered.lat-lon.30s_res.nc
+    wget ${base}/latlon_grid3.32769.nc
+    wget ${base}/ozone.clim
+    wget ${base}/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77
+    wget ${base}/qr_acr_{qg,qgV2,qs,qsV2}.dat
+    wget ${base}/rrtmgp-cloud-optics-coeffs-{lw,sw}.nc
+    wget ${base}/rrtmgp-data-lw-g256-2018-12-04.nc ${base}/rrtmgp-data-sw-g224-2018-12-04.nc
+    wget ${base}/rrtmgp-lw-prototype-g128-210413.nc ${base}/rrtmgp-sw-prototype-g131-210413.nc
+    wget ${base}/RTGSST.1982.2012.monthly.clim.grb
+    wget ${base}/seaice_newland.grb
+    wget ${base}/syndat_fildef.vit
+    wget ${base}/syndat_slmask.t126.gaussian
+    wget ${base}/syndat_stmnames ${base}/syndat_stmnames_{old,old1,old2}
+    wget ${base}/Thompson_MP_MONTHLY_CLIMO.nc
+    wget ${base}/ugwp_limb_tau.nc
+
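+The per-file ``wget`` listing above is exhaustive but slow to run by hand. As
+an alternative (a sketch, assuming the public ``noaa-ufs-srw-pds`` bucket
+continues to permit anonymous access), the AWS CLI can mirror an entire
+fix-file directory in one command:
+
+.. code-block:: console
+
+    aws s3 cp --recursive --no-sign-request \
+        s3://noaa-ufs-srw-pds/develop-20240618/fix/fix_am/ fix_am/
+
+Note that this fetches everything under ``fix_am/``, including the
+subdirectories listed below, so use it only when the full set is wanted.
+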
+
+``fix_am/co2dat_4a/`` Files:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. code-block:: console
+
+    base=https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a
+    # Annual CO2 historical data; 2021 and 2022 exist only as projections:
+    wget ${base}/global_co2historicaldata_{1956..2020}.txt
+    wget ${base}/global_co2historicaldata_{2009..2022}.txt_proj
+    wget ${base}/global_co2historicaldata_{2009..2021}.txt_proj_u
+    wget ${base}/global_co2historicaldata_glob.txt
+    wget ${base}/global_co2monthlycyc1976_{2006,2009}.txt
+    wget ${base}/MEMO
https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2020.txt_proj_u + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2021.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2021.txt_proj_u + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_2022.txt_proj + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2historicaldata_glob.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2monthlycyc1976_2006.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/global_co2monthlycyc1976_2009.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a/MEMO + + +``fix_am/fix_co2_proj`` Files: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. code-block:: console + + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2009.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2010.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2011.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2012.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2013.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2014.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2015.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2016.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2017.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2018.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2019.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2020.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2021.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_proj/global_co2historicaldata_2022.txt + + +``fix_am/fix_co2_update`` Files: +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. 
code-block:: console + + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2009.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2010.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2011.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2012.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2013.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2014.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2015.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2016.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2017.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2018.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2019.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2020.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/fix_co2_update/global_co2historicaldata_2021.txt + + +``fix_lut`` Files +--------------------- + +.. code-block:: console + + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_BC.v1_3.dat + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_DU.v15_3.dat + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_DU.v15_3.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_OC.v1_3.dat + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_SS.v3_3.dat + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_lut/optics_SU.v1_3.dat + + +``fix_orog`` Files +--------------------- + +.. 
code-block:: console + + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/clmgrb + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/clmgrb.index + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/convert.f90 + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gmted2010.30sec.flt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gmted2010.30sec.int + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gmted2010.30sec.flt.ctl + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gmted2010.30sec.int.ctl + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/thirty.second.antarctic.new.bin + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/GlobalLakeDepth.dat + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/GlobalLakeDepth.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/GlobalLakeStatus.dat + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/GlobalLakeStatus.txt + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gtopo30_gg.fine + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/gtopo30_gg.fine.nh + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/landcover30.fixed + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/makefile + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/run.lsf + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/TOP8M_avg.20I4.asc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/TOP8M_max.20I4.asc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_orog/TOP8M_slm.80I1.asc + + + +``fix_sfc_climo`` Files +-------------------------- + +.. 
code-block:: console + + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/facsf.1.0.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.igbp.0.03.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/leaf_area_index.0.05.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.igbp.0.05.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/maximum_snow_albedo.0.05.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.igbp.conus.0.01.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/slope_type.1.0.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.modis.igbp.0.03.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/snowfree_albedo.4comp.0.05.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.modis.igbp.0.05.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/soil_type.statsgo.0.03.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.modis.igbp.conus.0.01.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/soil_type.statsgo.0.05.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.03.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/soil_type.statsgo.conus.0.01.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.05.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/substrate_temperature.1.0.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.viirs.igbp.0.1.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/substrate_temperature.2.6x1.5.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_type.viirs.igbp.conus.0.01.nc + wget https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_sfc_climo/vegetation_greenness.0.144.nc + + diff --git a/environment.yml b/environment.yml index c574df5e23..a735213198 100644 --- a/environment.yml +++ b/environment.yml @@ -5,4 +5,4 @@ channels: dependencies: - pylint=2.17* - pytest=7.2* - - uwtools=1.0.0 + - uwtools=2.3* diff --git a/etc/lmod-setup.csh b/etc/lmod-setup.csh index 92a4394893..af79ad8a70 100644 --- a/etc/lmod-setup.csh +++ b/etc/lmod-setup.csh @@ -37,7 +37,7 @@ else if ( "$L_MACHINE" == singularity ) then module purge -else if ( "$L_MACHINE" == gaea-c5 ) then +else if ( "$L_MACHINE" == gaea ) then module reset else if ( "$L_MACHINE" == derecho ) then diff --git a/etc/lmod-setup.sh b/etc/lmod-setup.sh index 7328dea76f..b030d2a9f5 100644 --- a/etc/lmod-setup.sh +++ b/etc/lmod-setup.sh @@ -44,7 +44,7 @@ elif [ "$L_MACHINE" = singularity ]; then module purge -elif [ "$L_MACHINE" = gaea-c5 ]; then +elif [ "$L_MACHINE" = gaea ]; then module reset elif [ "$L_MACHINE" = derecho ]; then diff --git a/jobs/JREGIONAL_AQM_ICS b/jobs/JREGIONAL_AQM_ICS deleted file mode 100755 index 5c8ba9c8dd..0000000000 --- a/jobs/JREGIONAL_AQM_ICS +++ /dev/null @@ -1,112 +0,0 @@ -#!/usr/bin/env bash - -# 
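The per-year lists above follow a fixed naming pattern, so they can also be fetched in a loop. A minimal sketch, assuming a POSIX shell and the same bucket layout as in the lists above (the ``_proj``/``_proj_u`` variants and the remaining one-off files follow the same pattern):

.. code-block:: console

   # Base URL for the co2dat_4a fix files (taken from the lists above)
   base=https://noaa-ufs-srw-pds.s3.amazonaws.com/develop-20240618/fix/fix_am/co2dat_4a
   # Plain yearly files exist for 1956-2020
   for yr in $(seq 1956 2020); do
      wget "${base}/global_co2historicaldata_${yr}.txt"
   done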
diff --git a/environment.yml b/environment.yml
index c574df5e23..a735213198 100644
--- a/environment.yml
+++ b/environment.yml
@@ -5,4 +5,4 @@ channels:
 dependencies:
   - pylint=2.17*
   - pytest=7.2*
-  - uwtools=1.0.0
+  - uwtools=2.3*
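With the ``uwtools`` pin bumped to ``2.3*``, the environment would typically be rebuilt rather than patched by hand; a sketch, assuming conda is available and the file sits at the repository root:

.. code-block:: console

   # Create the environment fresh from the pinned spec
   conda env create -f environment.yml
   # Or update an existing environment in place, dropping stale packages
   conda env update -f environment.yml --prune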
diff --git a/etc/lmod-setup.csh b/etc/lmod-setup.csh
index 92a4394893..af79ad8a70 100644
--- a/etc/lmod-setup.csh
+++ b/etc/lmod-setup.csh
@@ -37,7 +37,7 @@
 else if ( "$L_MACHINE" == singularity ) then
    module purge

-else if ( "$L_MACHINE" == gaea-c5 ) then
+else if ( "$L_MACHINE" == gaea ) then
    module reset

 else if ( "$L_MACHINE" == derecho ) then
diff --git a/etc/lmod-setup.sh b/etc/lmod-setup.sh
index 7328dea76f..b030d2a9f5 100644
--- a/etc/lmod-setup.sh
+++ b/etc/lmod-setup.sh
@@ -44,7 +44,7 @@
 elif [ "$L_MACHINE" = singularity ]; then
    module purge

-elif [ "$L_MACHINE" = gaea-c5 ]; then
+elif [ "$L_MACHINE" = gaea ]; then
    module reset

 elif [ "$L_MACHINE" = derecho ]; then
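Following the ``gaea-c5`` to ``gaea`` rename, callers pass the new machine name when sourcing the Lmod setup; a sketch for both shell families (invocation style assumed from the case blocks above):

.. code-block:: console

   # bash-family shells
   source etc/lmod-setup.sh gaea
   # csh-family shells
   source etc/lmod-setup.csh gaea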
diff --git a/jobs/JREGIONAL_AQM_ICS b/jobs/JREGIONAL_AQM_ICS
deleted file mode 100755
index 5c8ba9c8dd..0000000000
--- a/jobs/JREGIONAL_AQM_ICS
+++ /dev/null
@@ -1,112 +0,0 @@
-#!/usr/bin/env bash
-
-#
-#-----------------------------------------------------------------------
-#
-# This script adds extra species for air quality modeling (AQM) to the
-# initial conditions (ICs).
-#
-#-----------------------------------------------------------------------
-#
-
-#
-#-----------------------------------------------------------------------
-#
-# Source the variable definitions file and the bash utility functions.
-#
-#-----------------------------------------------------------------------
-#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
-#
-#-----------------------------------------------------------------------
-#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
-scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-scrfunc_fn=$( basename "${scrfunc_fp}" )
-scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
-print_info_msg "
-========================================================================
-Entering script: \"${scrfunc_fn}\"
-In directory: \"${scrfunc_dir}\"
-
-This is the J-job script for the task that copies/fetches to a local
-directory (either from disk or HPSS) the aqm boundary conditions from
-which the model needs.
-========================================================================"
-#
-#-----------------------------------------------------------------------
-#
-# Set the name of and create the directory in which the output from this
-# script will be placed (if it doesn't already exist).
-#
-#-----------------------------------------------------------------------
-#
-if [ $RUN_ENVIR = "nco" ]; then
- export INPUT_DATA="${COMIN}"
-else
- export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
-fi
-mkdir_vrfy -p "${INPUT_DATA}"
-#
-#-----------------------------------------------------------------------
-#
-# Set the run directory
-#
-#-----------------------------------------------------------------------
-#
-if [ "${RUN_ENVIR}" = "community" ]; then
- DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_AQM_ICS}"
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
-fi
-#
-#-----------------------------------------------------------------------
-#
-# Call the ex-script for this J-job and pass to it the necessary variables.
-#
-#-----------------------------------------------------------------------
-#
-$SCRIPTSdir/exregional_aqm_ics.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
-#
-#-----------------------------------------------------------------------
-#
-# Run job postamble.
-#
-#-----------------------------------------------------------------------
-#
-job_postamble
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
-
diff --git a/jobs/JREGIONAL_AQM_LBCS b/jobs/JREGIONAL_AQM_LBCS
deleted file mode 100755
index c711f90288..0000000000
--- a/jobs/JREGIONAL_AQM_LBCS
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/env bash
-
-#
-#-----------------------------------------------------------------------
-#
-# This script adds extra species for air quality modeling (AQM) to the
-# lateral boundary conditions (LBCs) files.
-#
-#-----------------------------------------------------------------------
-#
-
-#
-#-----------------------------------------------------------------------
-#
-# Source the variable definitions file and the bash utility functions.
-#
-#-----------------------------------------------------------------------
-#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
-#
-#-----------------------------------------------------------------------
-#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
-scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-scrfunc_fn=$( basename "${scrfunc_fp}" )
-scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
-print_info_msg "
-========================================================================
-Entering script: \"${scrfunc_fn}\"
-In directory: \"${scrfunc_dir}\"
-
-This is the J-job script for the task that copies/fetches to a local
-directory (either from disk or HPSS) the aqm boundary conditions from
-which the model needs.
-========================================================================"
-#
-#-----------------------------------------------------------------------
-#
-# Set the name of and create the directory in which the output from this
-# script will be placed (if it doesn't already exist).
-#
-#-----------------------------------------------------------------------
-#
-if [ $RUN_ENVIR = "nco" ]; then
- export INPUT_DATA="${COMIN}"
-else
- export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
-fi
-mkdir_vrfy -p "${INPUT_DATA}"
-#
-#-----------------------------------------------------------------------
-#
-# Set the run directory
-#
-#-----------------------------------------------------------------------
-#
-if [ "${RUN_ENVIR}" = "community" ]; then
- DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_AQM_LBCS}"
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
-fi
-
-#
-#-----------------------------------------------------------------------
-#
-# Call the ex-script for this J-job and pass to it the necessary variables.
-#
-#-----------------------------------------------------------------------
-#
-$SCRIPTSdir/exregional_aqm_lbcs.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
-#
-#-----------------------------------------------------------------------
-#
-# Run job postamble.
-#
-#-----------------------------------------------------------------------
-#
-job_postamble
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
-
diff --git a/jobs/JREGIONAL_BIAS_CORRECTION_O3 b/jobs/JREGIONAL_BIAS_CORRECTION_O3
deleted file mode 100755
index ddcef59494..0000000000
--- a/jobs/JREGIONAL_BIAS_CORRECTION_O3
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env bash
-
-#
-#-----------------------------------------------------------------------
-#
-# This script runs BIAS-CORRECTION-O3.
-#
-#-----------------------------------------------------------------------
-#
-
-#
-#-----------------------------------------------------------------------
-#
-# Source the variable definitions file and the bash utility functions.
-#
-#-----------------------------------------------------------------------
-#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
-#
-#-----------------------------------------------------------------------
-#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
-scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-scrfunc_fn=$( basename "${scrfunc_fp}" )
-scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
-print_info_msg "
-========================================================================
-Entering script: \"${scrfunc_fn}\"
-In directory: \"${scrfunc_dir}\"
-
-This is the J-job script for the task that runs BIAS-CORRECTION-O3.
-========================================================================"
-#
-#-----------------------------------------------------------------------
-#
-# Set the run directory.
-#
-#-----------------------------------------------------------------------
-#
-if [ "${RUN_ENVIR}" = "community" ]; then
- DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_BIAS_CORRECTION_O3}"
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
-fi
-
-mkdir_vrfy -p ${COMOUTwmo}
-
-export PARMaqm_utils="${PARMaqm_utils:-${HOMEdir}/sorc/AQM-utils/parm}"
-
-TMP_STDAY=`${NDATE} -8760 ${PDY}${cyc} | cut -c1-8` # 1 year back
-export BC_STDAY=${BC_STDAY:-${TMP_STDAY}}
-#
-#-----------------------------------------------------------------------
-#
-# Call the ex-script for this J-job and pass to it the necessary varia-
-# bles.
-#
-#-----------------------------------------------------------------------
-#
-$SCRIPTSdir/exregional_bias_correction_o3.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
-#
-#-----------------------------------------------------------------------
-#
-# Run job postamble.
-#
-#-----------------------------------------------------------------------
-#
-job_postamble
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
-
diff --git a/jobs/JREGIONAL_BIAS_CORRECTION_PM25 b/jobs/JREGIONAL_BIAS_CORRECTION_PM25
deleted file mode 100755
index 7e08b02a12..0000000000
--- a/jobs/JREGIONAL_BIAS_CORRECTION_PM25
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env bash
-
-#
-#-----------------------------------------------------------------------
-#
-# This script runs BIAS-CORRECTION-PM25.
-#
-#-----------------------------------------------------------------------
-#
-
-#
-#-----------------------------------------------------------------------
-#
-# Source the variable definitions file and the bash utility functions.
-#
-#-----------------------------------------------------------------------
-#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_run_post|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
-#
-#-----------------------------------------------------------------------
-#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
-scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-scrfunc_fn=$( basename "${scrfunc_fp}" )
-scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
-print_info_msg "
-========================================================================
-Entering script: \"${scrfunc_fn}\"
-In directory: \"${scrfunc_dir}\"
-
-This is the J-job script for the task that runs BIAS-CORRECTION-PM25.
-========================================================================"
-#
-#-----------------------------------------------------------------------
-#
-# Set the run directory.
-#
-#-----------------------------------------------------------------------
-#
-if [ "${RUN_ENVIR}" = "community" ]; then
- DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_BIAS_CORRECTION_PM25}"
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
-fi
-
-mkdir_vrfy -p ${COMOUTwmo}
-
-export PARMaqm_utils="${PARMaqm_utils:-${HOMEdir}/sorc/AQM-utils/parm}"
-
-TMP_STDAY=`${NDATE} -8760 ${PDY}${cyc} | cut -c1-8` # 1 year back
-export BC_STDAY=${BC_STDAY:-${TMP_STDAY}}
-#
-#-----------------------------------------------------------------------
-#
-# Call the ex-script for this J-job and pass to it the necessary varia-
-# bles.
-#
-#-----------------------------------------------------------------------
-#
-$SCRIPTSdir/exregional_bias_correction_pm25.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
-#
-#-----------------------------------------------------------------------
-#
-# Run job postamble.
-#
-#-----------------------------------------------------------------------
-#
-job_postamble
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
-
diff --git a/jobs/JREGIONAL_CHECK_POST_OUTPUT b/jobs/JREGIONAL_CHECK_POST_OUTPUT
index 2b1fe69bbb..358b1fad72 100755
--- a/jobs/JREGIONAL_CHECK_POST_OUTPUT
+++ b/jobs/JREGIONAL_CHECK_POST_OUTPUT
@@ -3,7 +3,22 @@
 #
 #-----------------------------------------------------------------------
 #
+# The J-Job script for checking the post output.
 #
+# Run-time environment variables:
+#
+# CDATE
+# ENSMEM_INDX
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# workflow:
+# EXPTDIR
 #
 #-----------------------------------------------------------------------
 #
@@ -16,7 +31,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
@@ -51,10 +68,11 @@
 Entering script: \"${scrfunc_fn}\"
 In directory: \"${scrfunc_dir}\"

-This is the J-job script for the task that checks that all the post-
-processed output files in fact exist and are at least a certain age.
-These files may have been generated by UPP as part of the current SRW
-App workflow, or they may be user-staged.
+This is the J-job for the task that checks that no more than
+NUM_MISSING_FCST_FILES_MAX of each forecast's (ensemble member's) post-
+processed output files are missing. Note that such files may have been
+generated by UPP as part of the current SRW App workflow, or they may be
+user-staged.
 ========================================================================"
 #
 #-----------------------------------------------------------------------
@@ -70,15 +88,15 @@
 Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
 #
 #-----------------------------------------------------------------------
 #
-# Create a flag file to make rocoto aware that the make_grid task has
-# successfully completed (so that other tasks that depend on it can be
-# launched).
+# Create a flag file to make rocoto aware that the check_post_output task
+# has successfully completed (so that other tasks that depend on it can
+# be launched).
 #
 #-----------------------------------------------------------------------
 #
 ensmem_name="mem${ENSMEM_INDX}"
 cycle_dir="$EXPTDIR/$CDATE"
-mkdir_vrfy -p "${cycle_dir}"
+mkdir -p "${cycle_dir}"
 touch "${cycle_dir}/post_files_exist_${ensmem_name}.txt"
 #
 #-----------------------------------------------------------------------
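The ``source_config_for_task`` to ``source_yaml`` switch above is the pattern repeated in every J-job that follows: instead of one call with a pipe-delimited task filter, each needed section of ``GLOBAL_VAR_DEFNS_FP`` is sourced explicitly. A sketch of the idiom, with a task-specific section appended as an illustrative example (section names taken from the hunks in this diff):

.. code-block:: console

   . $USHdir/source_util_funcs.sh
   # Common sections, plus any task_* sections the J-job needs
   for sect in user nco workflow task_make_grid ; do
     source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
   done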
diff --git a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES
index 8efd332dd9..fbd582201a 100755
--- a/jobs/JREGIONAL_GET_EXTRN_MDL_FILES
+++ b/jobs/JREGIONAL_GET_EXTRN_MDL_FILES
@@ -3,20 +3,48 @@
 #
 #-----------------------------------------------------------------------
 #
-# This script gets either from the system directory or from mass store
-# (HPSS) the files generated by the external model (specified by the
-# variable EXTRN_MDL_NAME) for either the initial conditions (ICs) or the
-# lateral boundary conditions (LBCs). Which of these we are considering
-# depends on the value of the variable ICS_OR_LBCS, which should be defined
-# in the environment (when calling this script from a rocoto workflow,
-# the workflow should define this variable, e.g. using rocoto's
-# tag).
-#
-# Note that when we refer to ICs, we are referring to not only the atmospheric
-# fields at the initial time but also various surface fields (which are
-# for now time-independent) as well as the 0-th forecast hour LBCs. Also,
-# when we refer to LBCs, we are referring to the LBCs excluding the one
-# at the 0-th hour.
+# The J-Job script for getting the model files that will be used for
+# either initial conditions or lateral boundary conditions for the
+# experiment.
+#
+# Run-time environment variables:
+#
+# CDATE
+# COMIN
+# cyc
+# DATA
+# EXTRN_MDL_STAGING_DIR
+# GLOBAL_VAR_DEFNS_FP
+# ICS_OR_LBCS
+# PDY
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# workflow:
+# EXPTDIR
+#
+# task_get_extrn_lbcs:
+# EXTRN_MDL_FILES_LBCS
+# EXTRN_MDL_LBCS_OFFSET_HRS
+# EXTRN_MDL_NAME_LBCS
+# EXTRN_MDL_SOURCE_BASEDIR_LBCS
+# EXTRN_MDL_SYSBASEDIR_LBCS
+# FV3GFS_FILE_FMT_LBCS
+# LBC_SPEC_INTVL_HRS
+# USE_USER_STAGED_EXTRN_FILES
+#
+# task_get_extrn_ics:
+# EXTRN_MDL_FILES_ICS
+# EXTRN_MDL_ICS_OFFSET_HRS
+# EXTRN_MDL_NAME_ICS
+# EXTRN_MDL_SOURCE_BASEDIR_ICS
+# EXTRN_MDL_SYSBASEDIR_ICS
+# FV3GFS_FILE_FMT_ICS
+# USE_USER_STAGED_EXTRN_FILES
 #
 #-----------------------------------------------------------------------
 #
@@ -29,8 +57,12 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh "TRUE"
+for sect in user nco workflow task_get_extrn_lbcs task_get_extrn_ics ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+. $USHdir/job_preamble.sh
+
+
 #
 #-----------------------------------------------------------------------
 #
@@ -222,8 +254,8 @@ if [ $RUN_ENVIR = "nco" ]; then
   export EXTRN_MDL_STAGING_DIR="${EXTRN_MDL_STAGING_DIR:-${DATA}}"
 else
   export EXTRN_MDL_STAGING_DIR="${COMIN}/${EXTRN_MDL_NAME}/for_${ICS_OR_LBCS}"
-  mkdir_vrfy -p "${EXTRN_MDL_STAGING_DIR}"
-  cd_vrfy "${EXTRN_MDL_STAGING_DIR}"
+  mkdir -p "${EXTRN_MDL_STAGING_DIR}"
+  cd "${EXTRN_MDL_STAGING_DIR}"
 fi
 #
 #-----------------------------------------------------------------------
The "n" in this variable -# name denotes number of cells, the "h" is used to indicate that -# it refers to a halo region, the "w" is used to indicate that it -# refers to a wide halo (i.e. wider than the 3-cell and 4-cell ha- -# los that the FV3-LAM model requires as inputs, and the "T7" is -# used to indicate that the cell count is on tile 7. -# -# 2) orog_gen_scr: -# -# This script generates the orography file. It places its output in -# the directory defined by OROG_DIR. Note that: -# -# a) This script generates an orography file only on tile 7. -# -# b) This orography file contains a halo of the same width (NHW) -# as the grid file for tile 7 generated by the grid_gen_scr script -# in the previous step. -# -# 3) orog_fltr_scr: -# -# This script generates a filtered version of the orography file ge- -# nerated by the script orog_gen_scr. This script places its output -# in the temporary directory defined in WORKDIR_FLTR. Note that: -# -# a) The filtered orography file generated by this script contains a -# halo of the same width (NHW) as the (unfiltered) orography file -# generated by script orog_gen_scr (and the grid file generated by -# grid_gen_scr). -# -# b) In analogy with the input grid files, the FV3-LAM model needs as -# input two (filtered) orography files -- one with no halo cells -# and another with 3. These are obtained later below by "shaving" -# off layers of halo cells from the (filtered) orography file ge- -# nerated in this step. -# -# 4) shave_exec: -# -# This "shave" executable is called 4 times to generate 4 files from -# the tile 7 grid file generated by grid_gen_scr and the tile 7 fil- -# tered orography file generated by orog_fltr_scr (both of which have -# a halo of width NHW cells). The 4 output files are placed in the -# temporary directory defined in WORKDIR_SHVE. More specifically: -# -# a) shave_exec is called to shave the halo in the tile 7 grid file -# generated by grid_gen_scr down to a width of 3 cells and store -# the result in a new grid file in WORKDIR_SHVE. -# -# b) shave_exec is called to shave the halo in the tile 7 grid file -# generated by grid_gen_scr down to a width of 4 cells and store -# the result in a new grid file in WORKDIR_SHVE. -# -# c) shave_exec is called to shave the halo in the tile 7 filtered -# orography file generated by orog_fltr_scr down to a width of 0 -# cells (i.e. no halo) and store the result in a new filtered oro- -# graphy file in WORKDIR_SHVE. -# -# d) shave_exec is called to shave the halo in the tile 7 filtered -# orography file generated by orog_fltr_scr down to a width of 4 -# cells and store the result in a new filtered orography file in -# WORKDIR_SHVE. +# The J-Job that generates input NetCDF grid files for running the +# regional configuration of FV3 +# +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# DATA +# +# Experiment variables +# +# user: +# USHdir +# SCRIPTSdir +# +# workflow: +# PREEXISTING_DIR_METHOD +# +# task_make_grid: +# GRID_DIR # #----------------------------------------------------------------------- # @@ -106,7 +34,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_grid" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow task_make_grid ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- @@ -153,7 +83,7 @@ This is the J-job script for the task that generates grid files. 
diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID
index 844d782bc7..01484041e9 100755
--- a/jobs/JREGIONAL_MAKE_GRID
+++ b/jobs/JREGIONAL_MAKE_GRID
@@ -3,97 +3,25 @@
 #
 #-----------------------------------------------------------------------
 #
-# This script generates grid and orography files in NetCDF format that
-# are required as inputs for running the FV3-LAM model (i.e. the FV3 mo-
-# del on a regional domain). It in turn calls three other scripts whose
-# file names are specified in the variables grid_gen_scr, orog_gen_scr,
-# and orog_fltr_scr and then calls the executable defined in the varia-
-# ble shave_exec. These scripts/executable perform the following tasks:
-#
-# 1) grid_gen_scr:
-#
-# This script generates grid files that will be used by subsequent
-# preprocessing steps. It places its output in the directory defined
-# by GRID_DIR. Note that:
-#
-# a) This script creates grid files for each of the 7 tiles of the
-# cubed sphere grid (where tiles 1 through 6 cover the globe, and
-# tile 7 is the regional grid located somewhere within tile 6)
-# even though the forecast will be performed only on tile 7.
-#
-# b) The tile 7 grid file that this script creates includes a halo,
-# i.e. a layer of cells beyond the boundary of tile 7). The width
-# of this halo (i.e. the number of cells in the halo in the direc-
-# tion perpendicular to the boundary of the tile) must be made
-# large enough such that the "shave" steps later below (which take
-# this file as input and generate grid files with thinner halos)
-# have a wide enough starting halo to work with. More specifical-
-# ly, the FV3-LAM model needs as inputs two grid files: one with a
-# halo that is 3 cells and another with a halo that is 4 cells
-# wide. Thus, the halo in the grid file that the grid_gen_scr
-# script generates must be greater than 4 since otherwise, the
-# shave steps would shave off cells from within the interior of
-# tile 7. We will let NHW denote the width of the halo in the
-# grid file generated by grid_gen_scr. The "n" in this variable
-# name denotes number of cells, the "h" is used to indicate that
-# it refers to a halo region, the "w" is used to indicate that it
-# refers to a wide halo (i.e. wider than the 3-cell and 4-cell ha-
-# los that the FV3-LAM model requires as inputs, and the "T7" is
-# used to indicate that the cell count is on tile 7.
-#
-# 2) orog_gen_scr:
-#
-# This script generates the orography file. It places its output in
-# the directory defined by OROG_DIR. Note that:
-#
-# a) This script generates an orography file only on tile 7.
-#
-# b) This orography file contains a halo of the same width (NHW)
-# as the grid file for tile 7 generated by the grid_gen_scr script
-# in the previous step.
-#
-# 3) orog_fltr_scr:
-#
-# This script generates a filtered version of the orography file ge-
-# nerated by the script orog_gen_scr. This script places its output
-# in the temporary directory defined in WORKDIR_FLTR. Note that:
-#
-# a) The filtered orography file generated by this script contains a
-# halo of the same width (NHW) as the (unfiltered) orography file
-# generated by script orog_gen_scr (and the grid file generated by
-# grid_gen_scr).
-#
-# b) In analogy with the input grid files, the FV3-LAM model needs as
-# input two (filtered) orography files -- one with no halo cells
-# and another with 3. These are obtained later below by "shaving"
-# off layers of halo cells from the (filtered) orography file ge-
-# nerated in this step.
-#
-# 4) shave_exec:
-#
-# This "shave" executable is called 4 times to generate 4 files from
-# the tile 7 grid file generated by grid_gen_scr and the tile 7 fil-
-# tered orography file generated by orog_fltr_scr (both of which have
-# a halo of width NHW cells). The 4 output files are placed in the
-# temporary directory defined in WORKDIR_SHVE. More specifically:
-#
-# a) shave_exec is called to shave the halo in the tile 7 grid file
-# generated by grid_gen_scr down to a width of 3 cells and store
-# the result in a new grid file in WORKDIR_SHVE.
-#
-# b) shave_exec is called to shave the halo in the tile 7 grid file
-# generated by grid_gen_scr down to a width of 4 cells and store
-# the result in a new grid file in WORKDIR_SHVE.
-#
-# c) shave_exec is called to shave the halo in the tile 7 filtered
-# orography file generated by orog_fltr_scr down to a width of 0
-# cells (i.e. no halo) and store the result in a new filtered oro-
-# graphy file in WORKDIR_SHVE.
-#
-# d) shave_exec is called to shave the halo in the tile 7 filtered
-# orography file generated by orog_fltr_scr down to a width of 4
-# cells and store the result in a new filtered orography file in
-# WORKDIR_SHVE.
+# The J-Job that generates input NetCDF grid files for running the
+# regional configuration of FV3
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+# DATA
+#
+# Experiment variables
+#
+# user:
+# USHdir
+# SCRIPTSdir
+#
+# workflow:
+# PREEXISTING_DIR_METHOD
+#
+# task_make_grid:
+# GRID_DIR
 #
 #-----------------------------------------------------------------------
 #
@@ -106,7 +34,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_grid" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow task_make_grid ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
@@ -153,7 +83,7 @@ This is the J-job script for the task that generates grid files.
 #-----------------------------------------------------------------------
 #
 check_for_preexist_dir_file "${GRID_DIR}" "${PREEXISTING_DIR_METHOD}"
-mkdir_vrfy -p "${GRID_DIR}"
+mkdir -p "${GRID_DIR}"
 #
 #-----------------------------------------------------------------------
 #
@@ -162,7 +92,7 @@ mkdir_vrfy -p "${GRID_DIR}"
 #-----------------------------------------------------------------------
 #
 DATA="${DATA:-${GRID_DIR}/tmp}"
-mkdir_vrfy -p "$DATA"
+mkdir -p "$DATA"
 #
 #-----------------------------------------------------------------------
 #
diff --git a/jobs/JREGIONAL_MAKE_ICS b/jobs/JREGIONAL_MAKE_ICS
index 1e38f4058d..10a3b36fb7 100755
--- a/jobs/JREGIONAL_MAKE_ICS
+++ b/jobs/JREGIONAL_MAKE_ICS
@@ -1,5 +1,31 @@
 #!/usr/bin/env bash

+#
+#-----------------------------------------------------------------------
+#
+# The J-job to run chgres_cube for preparing initial conditions for the
+# FV3 forecast
+#
+# Run-time environment variables:
+#
+# COMIN
+# DATA
+# GLOBAL_VAR_DEFNS_FP
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# RUN_ENVIR
+# SCRIPTSdir
+# USHdir
+#
+# workflow:
+# EXPTDIR
+#
+#-----------------------------------------------------------------------
+#
+
 #
 #-----------------------------------------------------------------------
 #
@@ -8,7 +34,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_ics" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
@@ -56,11 +84,11 @@ for the FV3 (in NetCDF format).
 #-----------------------------------------------------------------------
 #
 if [ $RUN_ENVIR = "nco" ]; then
-  export INPUT_DATA="${COMIN}"
+  export INPUT_DATA="${COMIN}"
 else
-  export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
+  export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
 fi
-mkdir_vrfy -p "${INPUT_DATA}"
+mkdir -p "${INPUT_DATA}"
 #
 #
 #-----------------------------------------------------------------------
@@ -72,8 +100,8 @@ mkdir_vrfy -p "${INPUT_DATA}"
 if [ $RUN_ENVIR = "community" ]; then
   DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_MAKE_ICS}"
   check_for_preexist_dir_file "$DATA" "delete"
-  mkdir_vrfy -p $DATA
-  cd_vrfy $DATA
+  mkdir -p $DATA
+  cd $DATA
 fi
 #
 #-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_MAKE_LBCS b/jobs/JREGIONAL_MAKE_LBCS
index 4c524e26a6..91d9d3edbe 100755
--- a/jobs/JREGIONAL_MAKE_LBCS
+++ b/jobs/JREGIONAL_MAKE_LBCS
@@ -1,5 +1,29 @@
 #!/usr/bin/env bash

+#
+#-----------------------------------------------------------------------
+#
+# The J-job to run chgres_cube for preparing lateral boundary conditions
+# for the FV3 forecast
+#
+# Run-time environment variables:
+#
+# CDATE
+# COMIN
+# DATA
+# GLOBAL_VAR_DEFNS_FP
+# SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+# user:
+# RUN_ENVIR
+# SCRIPTSdir
+# USHdir
+#
+#-----------------------------------------------------------------------
+#
+
 #
 #-----------------------------------------------------------------------
 #
@@ -8,7 +32,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_lbcs" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
@@ -56,11 +82,11 @@ hour zero).
 #-----------------------------------------------------------------------
 #
 if [ $RUN_ENVIR = "nco" ]; then
-  export INPUT_DATA="${COMIN}"
+  export INPUT_DATA="${COMIN}"
 else
-  export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
+  export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
 fi
-mkdir_vrfy -p "${INPUT_DATA}"
+mkdir -p "${INPUT_DATA}"
 #
 #-----------------------------------------------------------------------
 #
@@ -71,8 +97,8 @@ mkdir_vrfy -p "${INPUT_DATA}"
 if [ "${RUN_ENVIR}" = "community" ]; then
   DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_MAKE_LBCS}"
   check_for_preexist_dir_file "$DATA" "delete"
-  mkdir_vrfy -p $DATA
-  cd_vrfy $DATA
+  mkdir -p $DATA
+  cd $DATA
 fi
 #
 #-----------------------------------------------------------------------
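The community-mode staging logic shared by the two J-jobs above is unchanged apart from dropping the ``_vrfy`` wrappers; the resulting pattern (shown here for ICs, with the LBCs variant differing only in the directory name) is:

.. code-block:: console

   DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_MAKE_ICS}"
   check_for_preexist_dir_file "$DATA" "delete"
   mkdir -p $DATA
   cd $DATA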
diff --git a/jobs/JREGIONAL_MAKE_OROG b/jobs/JREGIONAL_MAKE_OROG
index b6f674e5ee..28e2f965a5 100755
--- a/jobs/JREGIONAL_MAKE_OROG
+++ b/jobs/JREGIONAL_MAKE_OROG
@@ -1,5 +1,27 @@
 #!/usr/bin/env bash

+#
+#-----------------------------------------------------------------------
+#
+# The J-Job that generates input NetCDF orography files for running the
+# regional configuration of FV3
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# task_make_orog:
+# OROG_DIR
+#
+#-----------------------------------------------------------------------
+#
+
 #
 #-----------------------------------------------------------------------
 #
@@ -8,7 +30,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_orog" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow task_make_orog ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_MAKE_SFC_CLIMO b/jobs/JREGIONAL_MAKE_SFC_CLIMO
index eee25b193a..30b2d2c346 100755
--- a/jobs/JREGIONAL_MAKE_SFC_CLIMO
+++ b/jobs/JREGIONAL_MAKE_SFC_CLIMO
@@ -1,5 +1,30 @@
 #!/usr/bin/env bash

+#
+#-----------------------------------------------------------------------
+#
+# The J-job that generates surface climatology input files for the
+# FV3 forecast
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
+#
+# workflow:
+# PREEXISTING_DIR_METHOD
+#
+# task_make_sfc_climo:
+# SFC_CLIMO_DIR
+#
+#-----------------------------------------------------------------------
+#
+
 #
 #-----------------------------------------------------------------------
 #
@@ -8,7 +33,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_sfc_climo" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow task_make_sfc_climo ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 . $USHdir/job_preamble.sh
 #
 #-----------------------------------------------------------------------
@@ -55,7 +82,7 @@ climatology.
 #-----------------------------------------------------------------------
 #
 check_for_preexist_dir_file "${SFC_CLIMO_DIR}" "${PREEXISTING_DIR_METHOD}"
-mkdir_vrfy -p "${SFC_CLIMO_DIR}"
+mkdir -p "${SFC_CLIMO_DIR}"
 #
 #-----------------------------------------------------------------------
 #
@@ -66,7 +93,7 @@ mkdir_vrfy -p "${SFC_CLIMO_DIR}"
 DATA="${DATA:-${SFC_CLIMO_DIR}/tmp}"
 if [ $RUN_ENVIR != "nco" ]; then
   check_for_preexist_dir_file "$DATA" "delete"
-  mkdir_vrfy $DATA
+  mkdir $DATA
 fi
 #
 #-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_NEXUS_EMISSION b/jobs/JREGIONAL_NEXUS_EMISSION
deleted file mode 100755
index 915de0f054..0000000000
--- a/jobs/JREGIONAL_NEXUS_EMISSION
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env bash
-
-#
-#-----------------------------------------------------------------------
-#
-# This script generate NEXUS emission netcdf file.
-#
-#-----------------------------------------------------------------------
-#
-
-#
-#-----------------------------------------------------------------------
-#
-# Source the variable definitions file and the bash utility functions.
-#
-#-----------------------------------------------------------------------
-#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_fcst|cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
-#
-#-----------------------------------------------------------------------
-#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
-scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-scrfunc_fn=$( basename "${scrfunc_fp}" )
-scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
-print_info_msg "
-========================================================================
-Entering script: \"${scrfunc_fn}\"
-In directory: \"${scrfunc_dir}\"
-
-This is the J-job script for the task that generates the emission files
-using NEXUS which will output for FV3 (in NetCDF format).
-========================================================================"
-#
-#-----------------------------------------------------------------------
-#
-# Set the name of and create the directory in which the output from this
-# script will be placed (if it doesn't already exist).
-#
-#-----------------------------------------------------------------------
-#
-if [ $RUN_ENVIR = "nco" ]; then
- export INPUT_DATA="${COMIN}/NEXUS"
-else
- export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/NEXUS"
-fi
-mkdir_vrfy -p "${INPUT_DATA}"
-#
-#-----------------------------------------------------------------------
-#
-# Set the run directory
-#
-#-----------------------------------------------------------------------
-#
-if [ "${RUN_ENVIR}" = "community" ]; then
- DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_NEXUS_EMISSION_${nspt}}"
- check_for_preexist_dir_file "$DATA" "delete"
- mkdir_vrfy -p $DATA
- cd_vrfy $DATA
-fi
-#
-#-----------------------------------------------------------------------
-#
-# Call the ex-script for this J-job.
-#
-#-----------------------------------------------------------------------
-#
-$SCRIPTSdir/exregional_nexus_emission.sh || \
-print_err_msg_exit "\
-Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
-#
-#-----------------------------------------------------------------------
-#
-# Run job postamble.
-#
-#-----------------------------------------------------------------------
-#
-job_postamble
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
-#
-#-----------------------------------------------------------------------
-#
-{ restore_shell_opts; } > /dev/null 2>&1
-
-# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the J-job script for the task that copies or fetches GFS surface -data files from disk, or HPSS. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set the external model start time -# -#----------------------------------------------------------------------- -# -export TIME_OFFSET_HRS=${NEXUS_GFS_SFC_OFFSET_HRS:-0} -yyyymmdd=${PDY} -hh=${cyc} -export GFS_SFC_CDATE=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - ${TIME_OFFSET_HRS} hours" "+%Y%m%d%H" ) - -# -#----------------------------------------------------------------------- -# -# Check whether GFS_SFC_CDATE data files are available on the specified -# cycle date and time on HPSS. -# -#----------------------------------------------------------------------- -# -CDATE_min="2021032100" -if [ "$GFS_SFC_CDATE" -lt "$CDATE_min" ]; then - print_info_msg " -======================================================================== -GFS surface data (NetCDF) are not available on HPSS for this date. -CDATE: \"${GFS_SFC_CDATE}\" -CDATE_min: \"${CDATE_min}\" - -Therefore, this task will be skipped and MERRA2 data will be used for -NEXUS_EMISSION. -========================================================================" - - exit 0 -fi -# -#----------------------------------------------------------------------- -# -# Set the run directory -# -#----------------------------------------------------------------------- -# -if [ "${RUN_ENVIR}" = "community" ]; then - DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_NEXUS_GFS_SFC}" - check_for_preexist_dir_file "$DATA" "delete" - mkdir_vrfy -p $DATA - cd_vrfy $DATA -fi -# -#----------------------------------------------------------------------- -# -# Call the ex-script for this J-job and pass to it the necessary variables. -# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exregional_nexus_gfs_sfc.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble "FALSE" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - diff --git a/jobs/JREGIONAL_NEXUS_POST_SPLIT b/jobs/JREGIONAL_NEXUS_POST_SPLIT deleted file mode 100755 index 7cb8a55bf0..0000000000 --- a/jobs/JREGIONAL_NEXUS_POST_SPLIT +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). 
Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the J-job script for the task that generates the emission files -using NEXUS which will output for FV3 (in NetCDF format). -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set the name of and create the directory in which the output from this -# script will be placed (if it doesn't already exist). -# -#----------------------------------------------------------------------- -# -if [ $RUN_ENVIR = "nco" ]; then - export INPUT_DATA="${COMIN}" -else - export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT" -fi -mkdir_vrfy -p "${INPUT_DATA}" -# -#----------------------------------------------------------------------- -# -# Set the run directory -# -#----------------------------------------------------------------------- -# -if [ "${RUN_ENVIR}" = "community" ]; then - DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_NEXUS_POST_SPLIT}" - check_for_preexist_dir_file "$DATA" "delete" - mkdir_vrfy -p $DATA - cd_vrfy $DATA -fi -# -#----------------------------------------------------------------------- -# -# Call the ex-script for this J-job. -# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exregional_nexus_post_split.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. 
-# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - diff --git a/jobs/JREGIONAL_PLOT_ALLVARS b/jobs/JREGIONAL_PLOT_ALLVARS index 5e59abd93d..be5ee10f82 100755 --- a/jobs/JREGIONAL_PLOT_ALLVARS +++ b/jobs/JREGIONAL_PLOT_ALLVARS @@ -1,5 +1,45 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The J-job to plot the forecast output +# +# Run-time environment variables: +# +# CDATE +# COMOUT +# GLOBAL_VAR_DEFNS_FP +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# RUN_ENVIR +# SCRIPTSdir +# USHdir +# +# platform: +# FIXshp +# +# workflow: +# EXPT_SUBDIR +# PREEXISTING_DIR_METHOD +# PREDEF_GRID_NAME +# +# task_plot_allvars: +# COMOUT_REF +# PLOT_DOMAINS +# PLOT_FCST_END +# PLOT_FCST_INC +# PLOT_FCST_START +# +# task_run_fcst: +# FCST_LEN_HRS +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +48,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_plot_allvars|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow task_plot_allvars task_run_fcst ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- @@ -67,6 +109,11 @@ COMOUT_REF=$(eval echo ${COMOUT_REF}) #----------------------------------------------------------------------- # +if [ -n "${SRW_GRAPHICS_ENV:-}" ] ; then + set +u + conda activate ${SRW_GRAPHICS_ENV} + set -u +fi # plot all variables $SCRIPTSdir/exregional_plot_allvars.py \ --cycle ${CDATE} \ diff --git a/jobs/JREGIONAL_POINT_SOURCE b/jobs/JREGIONAL_POINT_SOURCE deleted file mode 100755 index 57000dd599..0000000000 --- a/jobs/JREGIONAL_POINT_SOURCE +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. 
-# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the J-job script for the task that generates the point source files. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set the name of and create the directory in which the output from this -# script will be placed (if it doesn't already exist). -# -#----------------------------------------------------------------------- -# -if [ $RUN_ENVIR = "nco" ]; then - export INPUT_DATA="${COMIN}" -else - export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT" -fi -mkdir_vrfy -p "${INPUT_DATA}" -# -#----------------------------------------------------------------------- -# -# Set the run directory -# -#----------------------------------------------------------------------- -# -if [ "${RUN_ENVIR}" = "community" ]; then - DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POINT_SOURCE}" - check_for_preexist_dir_file "$DATA" "delete" - mkdir_vrfy -p $DATA - cd_vrfy $DATA -fi -# -# -#----------------------------------------------------------------------- -# -# Call the ex-script for this J-job. -# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exregional_point_source.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - diff --git a/jobs/JREGIONAL_POST_STAT_O3 b/jobs/JREGIONAL_POST_STAT_O3 deleted file mode 100755 index a522d00dbb..0000000000 --- a/jobs/JREGIONAL_POST_STAT_O3 +++ /dev/null @@ -1,101 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# This script runs POST-STAT-O3. -# -#----------------------------------------------------------------------- -# - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). 
-# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the J-job script for the task that runs POST-STAT-O3. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set the run directory. -# -#----------------------------------------------------------------------- -# -if [ "${RUN_ENVIR}" = "community" ]; then - DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_POST_STAT_O3}" - check_for_preexist_dir_file "$DATA" "delete" - mkdir_vrfy -p $DATA - cd_vrfy $DATA -fi - -mkdir_vrfy -p ${COMOUTwmo} - -export PARMaqm_utils="${PARMaqm_utils:-${HOMEdir}/sorc/AQM-utils/parm}" -# -#----------------------------------------------------------------------- -# -# Call the ex-script for this J-job and pass to it the necessary varia- -# bles. -# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exregional_post_stat_o3.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - diff --git a/jobs/JREGIONAL_PRE_POST_STAT b/jobs/JREGIONAL_PRE_POST_STAT deleted file mode 100755 index 640c629bce..0000000000 --- a/jobs/JREGIONAL_PRE_POST_STAT +++ /dev/null @@ -1,108 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# This script runs PRE-POST-STAT. -# -#----------------------------------------------------------------------- -# - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -source_config_for_task "task_pre_post_stat" ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/job_preamble.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). 
-# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the J-job script for the task that runs POST-UPP-STAT. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set the run directory. -# -#----------------------------------------------------------------------- -# -if [ "${RUN_ENVIR}" = "community" ]; then - DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_PRE_POST_STAT}" - check_for_preexist_dir_file "$DATA" "delete" - mkdir_vrfy -p $DATA - cd_vrfy $DATA -fi -# -#----------------------------------------------------------------------- -# -# Call the ex-script for this J-job and pass to it the necessary varia- -# bles. -# -#----------------------------------------------------------------------- -# -$SCRIPTSdir/exregional_pre_post_stat.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." -# -#----------------------------------------------------------------------- -# -# Run job postamble. -# -#----------------------------------------------------------------------- -# -job_postamble -# -#----------------------------------------------------------------------- -# -# Remove post_complete flag file. -# -#----------------------------------------------------------------------- -# -post_complete_file="${COMIN}/post_${PDY}${cyc}_task_complete.txt" -if [ -f ${post_complete_file} ] ; then - rm_vrfy -f ${post_complete_file} -fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - diff --git a/jobs/JREGIONAL_RUN_FCST b/jobs/JREGIONAL_RUN_FCST index a7f7c96031..2542ab32f8 100755 --- a/jobs/JREGIONAL_RUN_FCST +++ b/jobs/JREGIONAL_RUN_FCST @@ -3,9 +3,24 @@ # #----------------------------------------------------------------------- # -# This script copies files from various directories into the experiment -# directory, creates links to some of them, and modifies others (e.g. -# templates) to customize them for the current experiment setup. +# The J-Job that runs the forecast +# +# Run-time environment variables: +# +# CDATE +# COMIN +# DATA +# GLOBAL_VAR_DEFNS_FP +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# workflow: +# RUN_ENVIR # #----------------------------------------------------------------------- # @@ -18,7 +33,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . 
$USHdir/job_preamble.sh "TRUE"
#
#-----------------------------------------------------------------------
@@ -76,8 +93,8 @@ fi
#
#-----------------------------------------------------------------------
#
-mkdir_vrfy -p ${DATA}/INPUT
-mkdir_vrfy -p ${DATA}/RESTART
+mkdir -p ${DATA}/INPUT
+mkdir -p ${DATA}/RESTART
#
#-----------------------------------------------------------------------
#
diff --git a/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT b/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT
index 707697b5ab..c7aee12df1 100755
--- a/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT
+++ b/jobs/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT
@@ -3,7 +3,18 @@
#
#-----------------------------------------------------------------------
#
+# The J-Job that runs either METplus's gen_ens_prod tool or its
+# ensemble_stat tool for ensemble verification.
#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
#
#-----------------------------------------------------------------------
#
@@ -16,7 +27,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_vx_ensgrid|task_run_vx_enspoint" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX
index 0301e9946a..e1207e0a81 100755
--- a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX
+++ b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX
@@ -3,8 +3,18 @@
#
#-----------------------------------------------------------------------
#
-# This script runs the METplus GridStat or PointStat tool for deterministic
-# verification.
+# This script runs the METplus GridStat or PointStat tool for
+# deterministic verification.
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
#
#-----------------------------------------------------------------------
#
@@ -17,7 +27,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_vx_gridstat" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
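The hunks above all swap source_config_for_task for a per-section source_yaml loop. As a rough illustration only: the sketch below shows one way such a helper could flatten a single top-level YAML section into exported shell variables. The function name source_yaml_sketch, the yq dependency, and the flat-section assumption are all mine, not the repository's implementation.

#!/usr/bin/env bash
# Hypothetical stand-in for source_yaml: export every key of one
# top-level section of a YAML file, assuming the section is a flat
# key/value mapping and a jq-compatible `yq` is on PATH.
source_yaml_sketch() {
  local yaml_file="$1" section="$2" key val
  while IFS='=' read -r key val; do
    # Skip blank lines; export each key=value pair into the environment.
    [ -n "${key}" ] && export "${key}=${val}"
  done < <(yq -r ".${section} | to_entries[] | .key + \"=\" + (.value|tostring)" "${yaml_file}")
}
# Usage mirroring the loops above (GLOBAL_VAR_DEFNS_FP as in the J-jobs):
#   for sect in user nco workflow ; do
#     source_yaml_sketch "${GLOBAL_VAR_DEFNS_FP}" "${sect}"
#   done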
diff --git a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN
index ab08320f33..29b22502a4 100755
--- a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN
+++ b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSMEAN
@@ -3,7 +3,19 @@
#
#-----------------------------------------------------------------------
#
+# The J-Job that runs MET/METplus's GridStat or PointStat tool to
+# perform verification on the ensemble mean of a specified field (or
+# group of fields).
#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
#
#-----------------------------------------------------------------------
#
@@ -16,7 +28,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_vx_ensgrid_mean" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB
index 7da98212ac..731cf575a5 100755
--- a/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB
+++ b/jobs/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB
@@ -3,7 +3,19 @@
#
#-----------------------------------------------------------------------
#
+# The J-Job that runs METplus's GridStat or PointStat tool to perform
+# verification on the ensemble frequencies/probabilities of a specified
+# field (or group of fields).
#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
#
#-----------------------------------------------------------------------
#
@@ -16,7 +28,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_vx_ensgrid_prob" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_MET_PB2NC_OBS b/jobs/JREGIONAL_RUN_MET_PB2NC_OBS
index e36e72418f..89c9bb73f4 100755
--- a/jobs/JREGIONAL_RUN_MET_PB2NC_OBS
+++ b/jobs/JREGIONAL_RUN_MET_PB2NC_OBS
@@ -4,6 +4,18 @@
#-----------------------------------------------------------------------
#
#
+# The J-Job that runs the METplus pb2nc tool on NDAS observations for
+# all forecast hours of an initialization time.
+#
+# Run-time environment variables:
+#
+# GLOBAL_VAR_DEFNS_FP
+#
+# Experiment variables
+#
+# user:
+# SCRIPTSdir
+# USHdir
#
#-----------------------------------------------------------------------
#
@@ -16,7 +28,9 @@
#-----------------------------------------------------------------------
#
. $USHdir/source_util_funcs.sh
-source_config_for_task "task_run_met_pb2nc_obs" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
. $USHdir/job_preamble.sh
#
#-----------------------------------------------------------------------
@@ -51,8 +65,8 @@ print_info_msg "
Entering script: \"${scrfunc_fn}\"
In directory: \"${scrfunc_dir}\"
-This is the J-job script for the task that runs METplus for point-stat
-by initialization time for all forecast hours.
+This is the J-job script for the task that runs METplus for pb2nc on
+NDAS observations.
========================================================================"
#
#-----------------------------------------------------------------------
diff --git a/jobs/JREGIONAL_RUN_MET_PCPCOMBINE b/jobs/JREGIONAL_RUN_MET_PCPCOMBINE
index 7364ed96c9..8ac29887e8 100755
--- a/jobs/JREGIONAL_RUN_MET_PCPCOMBINE
+++ b/jobs/JREGIONAL_RUN_MET_PCPCOMBINE
@@ -3,7 +3,20 @@
#
#-----------------------------------------------------------------------
#
+# The J-job that runs the MET/METplus PcpCombine tool on hourly
+# accumulated precipitation (APCP) data to obtain APCP for multi-hour
+# accumulation periods. The data can be from CCPA observations or a
+# forecast.
# +# Run-time environment variables: +# +# GLOBAL_VAR_DEFNS_FP +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir # #----------------------------------------------------------------------- # @@ -16,7 +29,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pcpcombine" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST index b4327667a0..58c469fc6d 100755 --- a/jobs/JREGIONAL_RUN_POST +++ b/jobs/JREGIONAL_RUN_POST @@ -3,8 +3,38 @@ # #----------------------------------------------------------------------- # -# This script runs the post-processor (UPP) on the NetCDF output files -# of the write component of the FV3-LAM model. +# The J-Job that runs the Unified Post-processor (UPP) on the NetCDF +# output from FV3. +# +# Run-time environment variables: +# +# COMIN +# COMOUT +# cyc +# DATA +# DATAROOT +# GLOBAL_VAR_DEFNS_FP +# PDY +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# platform: +# WORKFLOW_MANAGER +# +# workflow: +# DATE_FIRST_CYCL +# FCST_LEN_CYCL +# FCST_LEN_HRS +# INCR_CYCL_FREQ +# RUN_ENVIR +# +# task_run_post: +# SUB_HOURLY_POST # #----------------------------------------------------------------------- # @@ -17,7 +47,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_post|task_run_fcst" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- @@ -60,7 +92,7 @@ on the output files corresponding to a specified forecast hour. # minutes (fmn) are set to "00". This is necessary in order to pass # "fmn" into the post ex-script for the calculation of post_time. 
# -if [ "${SUB_HOURLY_POST}" != "TRUE" ]; then +if [ $(boolify "${SUB_HOURLY_POST}") != "TRUE" ]; then export fmn="00" fi # @@ -85,18 +117,18 @@ fi if [ "${RUN_ENVIR}" = "community" ]; then DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" export COMOUT="${DATA}/postprd" - mkdir_vrfy -p "${COMOUT}" + mkdir -p "${COMOUT}" fi -if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then +if [ $(boolify "${SUB_HOURLY_POST}") = "TRUE" ]; then export DATA_FHR="${DATA:-$COMOUT}/$fhr$fmn" else export DATA_FHR="${DATA:-$COMOUT}/$fhr" fi check_for_preexist_dir_file "${DATA_FHR}" "delete" -mkdir_vrfy -p "${DATA_FHR}" +mkdir -p "${DATA_FHR}" -cd_vrfy "${DATA_FHR}" +cd "${DATA_FHR}" # #----------------------------------------------------------------------- # @@ -139,7 +171,7 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then fcst_len_hrs=$( printf "%03d" "${FCST_LEN_HRS}" ) if [ "${fhr}" = "${fcst_len_hrs}" ]; then - touch "${COMIN}/post_${PDY}${cyc}_task_complete.txt" + touch "${DATAROOT}/DATA_SHARE/${PDY}${cyc}/post_${PDY}${cyc}_task_complete.txt" fi fi fi diff --git a/jobs/JREGIONAL_RUN_PRDGEN b/jobs/JREGIONAL_RUN_PRDGEN index 2d30ced9db..1cf933b666 100755 --- a/jobs/JREGIONAL_RUN_PRDGEN +++ b/jobs/JREGIONAL_RUN_PRDGEN @@ -3,10 +3,33 @@ # #----------------------------------------------------------------------- # -# This script runs wgrib2 to create various subdomain GRIB2 files from -# the raw UPP-generated GRIB2 output from the run_post task of the +# The J-Job that runs wgrib2 to create various subdomain GRIB2 files +# from the raw UPP-generated GRIB2 output from the run_post task of the # FV3-LAM model. # +# Run-time environment variables: +# +# COMIN +# COMOUT +# DATA +# GLOBAL_VAR_DEFNS_FP +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# SCRIPTSdir +# USHdir +# +# platform: +# WORKFLOW_MANAGER +# +# workflow: +# RUN_ENVIR +# +# task_run_post: +# SUB_HOURLY_POST +# #----------------------------------------------------------------------- # @@ -18,7 +41,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_prdgen|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done . $USHdir/job_preamble.sh # #----------------------------------------------------------------------- @@ -84,18 +109,18 @@ DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}}" if [ "${RUN_ENVIR}" = "community" ]; then export COMOUT="${DATA}/postprd" fi -mkdir_vrfy -p "${COMOUT}" +mkdir -p "${COMOUT}" # subhourly post -if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then +if [ $(boolify "${SUB_HOURLY_POST}") = "TRUE" ]; then export DATA_FHR="${DATA:-$COMOUT}/$fhr$fmn" else export DATA_FHR="${DATA:-$COMOUT}/$fhr" fi check_for_preexist_dir_file "${DATA_FHR}" "delete" -mkdir_vrfy -p "${DATA_FHR}" +mkdir -p "${DATA_FHR}" -cd_vrfy "${DATA_FHR}" +cd "${DATA_FHR}" # #----------------------------------------------------------------------- # diff --git a/jobs/JSRW_AQM_ICS b/jobs/JSRW_AQM_ICS new file mode 100755 index 0000000000..5d5f6d970e --- /dev/null +++ b/jobs/JSRW_AQM_ICS @@ -0,0 +1,181 @@ +#!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# This script adds extra species for air quality modeling (AQM) to the +# initial conditions (ICs). 
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, pp.4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+for sect in user nco platform workflow global cpl_aqm_parm ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that copies/fetches to a local
+directory (either from disk or HPSS) the AQM initial conditions that
+the model needs.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+  if [ "${SCHED}" = "slurm" ]; then
+    job=${SLURM_JOB_NAME}
+    pid=${SLURM_JOB_ID}
+  elif [ "${SCHED}" = "pbspro" ]; then
+    job=${PBS_JOBNAME}
+    pid=${PBS_JOBID}
+  else
+    job="task"
+    pid=$$
+  fi
+  jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
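As a quick usage note on the scheduler-detection block above: the working directory name is fully determined by the job name, cycle, and scheduler job ID. The values below are hypothetical, chosen only to show the resulting layout.

# Hypothetical values (Slurm case) to illustrate the jobid/DATA naming:
job="aqm_ics"; pid=4217              # from SLURM_JOB_NAME / SLURM_JOB_ID
PDY=20230601; cyc=12; DATAROOT=/lfs/h2/stmp
jobid="${job}.${PDY}${cyc}.${pid}"
echo "${DATAROOT}/${jobid}"          # -> /lfs/h2/stmp/aqm_ics.2023060112.4217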
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Set the name of and create the directory in which the output from this
+# script will be placed (if it doesn't already exist).
+#
+#-----------------------------------------------------------------------
+#
+if [ $RUN_ENVIR = "nco" ]; then
+  export INPUT_DATA="${COMIN}"
+else
+  export INPUT_DATA="${COMIN}${SLASH_ENSMEM_SUBDIR}/INPUT"
+fi
+mkdir -p "${INPUT_DATA}"
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_aqm_ics.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+  rm -rf ${DATA}
+fi
+date
+
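The JSRW jobs above and below repeatedly gate logic on boolify, a helper the SRW App provides in its ush/ utilities. Purely as a hedged sketch of the idea (this is not the repository's implementation), a minimal normalizer might look like:

boolify_sketch() {
  # Normalize common truthy spellings to "TRUE"; everything else "FALSE".
  case "$(printf '%s' "${1:-}" | tr '[:upper:]' '[:lower:]')" in
    true|yes|y|1) echo "TRUE" ;;
    *)            echo "FALSE" ;;
  esac
}
# Usage, mirroring the pattern in these J-jobs:
#   if [ "$(boolify_sketch "${DO_ENSEMBLE}")" = "TRUE" ]; then ... ; fi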
diff --git a/jobs/JSRW_AQM_LBCS b/jobs/JSRW_AQM_LBCS
new file mode 100755
index 0000000000..9279dbe190
--- /dev/null
+++ b/jobs/JSRW_AQM_LBCS
@@ -0,0 +1,183 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script adds extra species for air quality modeling (AQM) to the
+# lateral boundary condition (LBC) files.
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, pp.4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+for sect in user nco platform workflow global cpl_aqm_parm task_get_extrn_lbcs \
+  task_make_orog task_make_lbcs ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that copies/fetches to a local
+directory (either from disk or HPSS) the AQM boundary conditions that
+the model needs.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+  if [ "${SCHED}" = "slurm" ]; then
+    job=${SLURM_JOB_NAME}
+    pid=${SLURM_JOB_ID}
+  elif [ "${SCHED}" = "pbspro" ]; then
+    job=${PBS_JOBNAME}
+    pid=${PBS_JOBID}
+  else
+    job="task"
+    pid=$$
+  fi
+  jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+export COMINgefs="${COMINgefs:-${COMINgefs_default}}"
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Set the name of and create the directory in which the output from this
+# script will be placed (if it doesn't already exist).
+#
+#-----------------------------------------------------------------------
+#
+if [ $RUN_ENVIR = "nco" ]; then
+  export INPUT_DATA="${COMIN}"
+else
+  export INPUT_DATA="${EXPTDIR}/${PDY}${cyc}${SLASH_ENSMEM_SUBDIR}/INPUT"
+fi
+mkdir -p "${INPUT_DATA}"
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_aqm_lbcs.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+  rm -rf ${DATA}
+fi
+date
+
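Both AQM jobs above resolve COMIN/COMOUT either through compath.py (on WCOSS2) or from a plain ${COMROOT} template. To make the resulting layout concrete, here is the non-WCOSS2 branch evaluated with hypothetical values (paths and version invented for illustration only):

# Hypothetical values, for illustration of the COM directory template:
COMROOT=/exp/com; NET=aqm; model_ver=v1.0; RUN=aqm; PDY=20230601; cyc=06
SLASH_ENSMEM_SUBDIR=""
echo "${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}"
# -> /exp/com/aqm/v1.0/aqm.20230601/06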
diff --git a/jobs/JSRW_BIAS_CORRECTION_O3 b/jobs/JSRW_BIAS_CORRECTION_O3
new file mode 100755
index 0000000000..0849614840
--- /dev/null
+++ b/jobs/JSRW_BIAS_CORRECTION_O3
@@ -0,0 +1,164 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script runs BIAS-CORRECTION-O3.
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, pp.4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+for sect in user nco platform workflow global cpl_aqm_parm task_run_post \
+  task_bias_correction_o3 ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that runs BIAS-CORRECTION-O3.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+  if [ "${SCHED}" = "slurm" ]; then
+    job=${SLURM_JOB_NAME}
+    pid=${SLURM_JOB_ID}
+  elif [ "${SCHED}" = "pbspro" ]; then
+    job=${PBS_JOBNAME}
+    pid=${PBS_JOBID}
+  else
+    job="task"
+    pid=$$
+  fi
+  jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+
+TMP_STDAY=`${NDATE} -8760 ${PDY}${cyc} | cut -c1-8` # 1 year back
+export BC_STDAY=${BC_STDAY:-${TMP_STDAY}}
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_bias_correction_o3.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+  rm -rf ${DATA}
+fi
+date
+
diff --git a/jobs/JSRW_BIAS_CORRECTION_PM25 b/jobs/JSRW_BIAS_CORRECTION_PM25
new file mode 100755
index 0000000000..a0a7f76dad
--- /dev/null
+++ b/jobs/JSRW_BIAS_CORRECTION_PM25
@@ -0,0 +1,164 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script runs BIAS-CORRECTION-PM25.
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, pp.4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+for sect in user nco platform workflow global cpl_aqm_parm task_run_post \
+  task_bias_correction_pm25 ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that runs BIAS-CORRECTION-PM25.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+  if [ "${SCHED}" = "slurm" ]; then
+    job=${SLURM_JOB_NAME}
+    pid=${SLURM_JOB_ID}
+  elif [ "${SCHED}" = "pbspro" ]; then
+    job=${PBS_JOBNAME}
+    pid=${PBS_JOBID}
+  else
+    job="task"
+    pid=$$
+  fi
+  jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+
+TMP_STDAY=`${NDATE} -8760 ${PDY}${cyc} | cut -c1-8` # 1 year back
+export BC_STDAY=${BC_STDAY:-${TMP_STDAY}}
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_bias_correction_pm25.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+  rm -rf ${DATA}
+fi
+date
+
diff --git a/jobs/JREGIONAL_FIRE_EMISSION b/jobs/JSRW_FIRE_EMISSION
similarity index 55%
rename from jobs/JREGIONAL_FIRE_EMISSION
rename to jobs/JSRW_FIRE_EMISSION
index fdb6e57b0a..8a2b581274 100755
--- a/jobs/JREGIONAL_FIRE_EMISSION
+++ b/jobs/JSRW_FIRE_EMISSION
@@ -7,26 +7,32 @@
#
#-----------------------------------------------------------------------
#
-
+date
+export PS4='+ $SECONDS + '
+set -xue
#
#-----------------------------------------------------------------------
#
-# Source the variable definitions file and the bash utility functions.
+# Set the NCO standard environment variables (Table 1, pp.4)
#
#-----------------------------------------------------------------------
#
-. $USHdir/source_util_funcs.sh
-source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP}
-. $USHdir/job_preamble.sh
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
#
#-----------------------------------------------------------------------
#
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
+# Source the variable definitions file and the bash utility functions.
#
#-----------------------------------------------------------------------
#
-{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+for sect in user nco platform workflow global cpl_aqm_parm ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
#
#-----------------------------------------------------------------------
#
@@ -57,78 +63,127 @@ emission data files from disk, or HPSS.
#
#-----------------------------------------------------------------------
#
-# Set the external model start time
+# Define job and jobid by default for rocoto
#
#-----------------------------------------------------------------------
#
-export TIME_OFFSET_HRS=${AQM_FIRE_FILE_OFFSET_HRS:-0}
-yyyymmdd=${PDY}
-hh=${cyc}
-export FIRE_FILE_CDATE=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - ${TIME_OFFSET_HRS} hours" "+%Y%m%d%H" )
-
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+  if [ "${SCHED}" = "slurm" ]; then
+    job=${SLURM_JOB_NAME}
+    pid=${SLURM_JOB_ID}
+  elif [ "${SCHED}" = "pbspro" ]; then
+    job=${PBS_JOBNAME}
+    pid=${PBS_JOBID}
+  else
+    job="task"
+    pid=$$
+  fi
+  jobid="${job}.${PDY}${cyc}.${pid}"
+fi
#
#-----------------------------------------------------------------------
#
-# Check whether FIRE EMISSION data files are available on the specified
-# cycle date and time on HPSS (FIRE_FILE_CDATE).
+# Create a temp working directory (DATA) and cd into it.
#
#-----------------------------------------------------------------------
#
-CDATE_min="2022101500"
-if [ "$FIRE_FILE_CDATE" -lt "$CDATE_min" ]; then
-  print_info_msg "
-========================================================================
-RAVE fire emission data are not available on HPSS for this date.
-CDATE: \"${FIRE_FILE_CDATE}\"
-CDATE_min: \"${CDATE_min}\"
-========================================================================"
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
fi
+
+mkdir -p ${COMOUT}
+
+export COMINfire="${COMINfire:-${COMINfire_default}}"
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
#
#-----------------------------------------------------------------------
#
-# Set the run directory
+# Set sub-cycle and ensemble member names in file/directory names
#
#-----------------------------------------------------------------------
#
-if [ "${RUN_ENVIR}" = "community" ]; then
-  DATA="${DATA:-${COMIN}${SLASH_ENSMEM_SUBDIR}/tmp_FIRE_EMISSION}"
-  check_for_preexist_dir_file "$DATA" "delete"
-  mkdir_vrfy -p $DATA
-  cd_vrfy $DATA
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
-z ${ENSMEM_INDX} ]; then + export dot_ensmem=".mem${ENSMEM_INDX}" +else + export dot_ensmem= fi # #----------------------------------------------------------------------- # -# Create the directory where the RAVE fire emission files should be stored +# Set the external model start time # #----------------------------------------------------------------------- # -export FIRE_EMISSION_STAGING_DIR="${FIRE_EMISSION_STAGING_DIR:-${COMIN}/FIRE_EMISSION}" -mkdir_vrfy -p "${FIRE_EMISSION_STAGING_DIR}" +export TIME_OFFSET_HRS=${AQM_FIRE_FILE_OFFSET_HRS:-0} +export FIRE_FILE_CDATE=`$NDATE -${TIME_OFFSET_HRS} ${PDY}${cyc}` # #----------------------------------------------------------------------- # -# Call the ex-script for this J-job and pass to it the necessary variables. +# Check whether FIRE EMISSION data files are available on the specified +# cycle date and time on HPSS (FIRE_FILE_CDATE). # #----------------------------------------------------------------------- # -$SCRIPTSdir/exregional_fire_emission.sh || \ -print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." +CDATE_min="2022101500" +if [ "$FIRE_FILE_CDATE" -lt "$CDATE_min" ]; then + print_info_msg " +======================================================================== +RAVE fire emission data are not available on HPSS for this date. +CDATE: \"${FIRE_FILE_CDATE}\" +CDATE_min: \"${CDATE_min}\" +========================================================================" +fi # #----------------------------------------------------------------------- # -# Run job postamble. +# Call the ex-script for this J-job. # #----------------------------------------------------------------------- # -job_postamble +export pgmout="${DATA}/OUTPUT.$$" +env + +${SCRIPTSsrw}/exsrw_fire_emission.sh +export err=$?; err_chk + +if [ -e "$pgmout" ]; then + cat $pgmout +fi # #----------------------------------------------------------------------- # -# Restore the shell options saved at the beginning of this script/function. +# Whether or not working directory DATA should be kept. # #----------------------------------------------------------------------- # -{ restore_shell_opts; } > /dev/null 2>&1 +if [ "${KEEPDATA}" = "NO" ]; then + rm -rf ${DATA} +fi +date diff --git a/jobs/JSRW_NEXUS_EMISSION b/jobs/JSRW_NEXUS_EMISSION new file mode 100755 index 0000000000..aab5869cff --- /dev/null +++ b/jobs/JSRW_NEXUS_EMISSION @@ -0,0 +1,165 @@ +#!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# This script generate individual NEXUS emission netcdf file. +# +#----------------------------------------------------------------------- +# +date +export PS4='+ $SECONDS + ' +set -xue +# +#----------------------------------------------------------------------- +# +# Set the NCO standard environment variables (Table 1, pp.4) +# +#----------------------------------------------------------------------- +# +export USHsrw="${HOMEdir}/ush" +export EXECsrw="${HOMEdir}/exec" +export PARMsrw="${HOMEdir}/parm" +export SCRIPTSsrw="${HOMEdir}/scripts" +# +#----------------------------------------------------------------------- +# +# Source the variable definitions file and the bash utility functions. +# +#----------------------------------------------------------------------- +# +export USHdir="${USHsrw}" # should be removed later +. 
${USHsrw}/source_util_funcs.sh +for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm task_nexus_emission ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " +======================================================================== +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + +This is the J-job script for the task that generates the emission files +using NEXUS which will output for FV3 (in NetCDF format). +========================================================================" +# +#----------------------------------------------------------------------- +# +# Define job and jobid by default for rocoto +# +#----------------------------------------------------------------------- +# +WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}" +if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + if [ "${SCHED}" = "slurm" ]; then + job=${SLURM_JOB_NAME} + pid=${SLURM_JOB_ID} + elif [ "${SCHED}" = "pbspro" ]; then + job=${PBS_JOBNAME} + pid=${PBS_JOBID} + else + job="task" + pid=$$ + fi + jobid="${job}.${PDY}${cyc}.${pid}" +fi +# +#----------------------------------------------------------------------- +# +# Create a temp working directory (DATA) and cd into it. +# +#----------------------------------------------------------------------- +# +export DATA="${DATA:-${DATAROOT}/${jobid}}" +mkdir -p $DATA +cd $DATA +# +#----------------------------------------------------------------------- +# +# Define NCO environment variables and set COM type definitions. +# +#----------------------------------------------------------------------- +# +export NET="${NET:-${NET_default}}" +export RUN="${RUN:-${RUN_default}}" + +[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT +if [ "${MACHINE}" = "WCOSS2" ]; then + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" +else + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" +fi + +mkdir -p ${COMOUT} + +# Create a teomporary share directory +export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" +mkdir -p ${DATA_SHARE} + +# Run setpdy to initialize PDYm and PDYp variables +export cycle="${cycle:-t${cyc}z}" +setpdy.sh +. ./PDY +# +#----------------------------------------------------------------------- +# +# Set sub-cycle and ensemble member names in file/diectory names +# +#----------------------------------------------------------------------- +# +if [ ${subcyc} -ne 0 ]; then + export cycle="t${cyc}${subcyc}z" +fi +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! 
-z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_nexus_emission.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+  rm -rf ${DATA}
+fi
+date
+
diff --git a/jobs/JSRW_NEXUS_GFS_SFC b/jobs/JSRW_NEXUS_GFS_SFC new file mode 100755 index 0000000000..ceed6be32a --- /dev/null +++ b/jobs/JSRW_NEXUS_GFS_SFC @@ -0,0 +1,211 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script gets GFS surface data files from disk or HPSS.
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, pp.4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+for sect in user nco platform workflow global cpl_aqm_parm ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that copies or fetches GFS surface
+data files from disk, or HPSS.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+  if [ "${SCHED}" = "slurm" ]; then
+    job=${SLURM_JOB_NAME}
+    pid=${SLURM_JOB_ID}
+  elif [ "${SCHED}" = "pbspro" ]; then
+    job=${PBS_JOBNAME}
+    pid=${PBS_JOBID}
+  else
+    job="task"
+    pid=$$
+  fi
+  jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Set the external model start time
+#
+#-----------------------------------------------------------------------
+#
+export TIME_OFFSET_HRS=${NEXUS_GFS_SFC_OFFSET_HRS:-0}
+export GFS_SFC_CDATE=`$NDATE -${TIME_OFFSET_HRS} ${PDY}${cyc}`
+#
+#-----------------------------------------------------------------------
+#
+# Check whether GFS_SFC_CDATE data files are available on the specified
+# cycle date and time on HPSS.
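#
# --- Illustrative sketch (not part of the change above) ---
# What the `$NDATE -${TIME_OFFSET_HRS} ${PDY}${cyc}` call computes, using the
# same GNU date idiom the old JREGIONAL J-job used via $DATE_UTIL. NDATE
# itself comes from NCEP prod_util; the sample values below are hypothetical.
#
PDY="20230615"; cyc="06"; TIME_OFFSET_HRS="3"   # hypothetical cycle and offset
GFS_SFC_CDATE=$(date -u --date "${PDY} ${cyc} UTC - ${TIME_OFFSET_HRS} hours" "+%Y%m%d%H")
echo "${GFS_SFC_CDATE}"                         # -> 2023061503
#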
+#
+#-----------------------------------------------------------------------
+#
+CDATE_min="2021032100"
+if [ "$GFS_SFC_CDATE" -lt "$CDATE_min" ]; then
+  print_info_msg "
+========================================================================
+GFS surface data (NetCDF) are not available on HPSS for this date.
+CDATE: \"${GFS_SFC_CDATE}\"
+CDATE_min: \"${CDATE_min}\"
+
+Therefore, this task will be skipped and MERRA2 data will be used for
+NEXUS_EMISSION.
+========================================================================"
+
+  exit 0
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+$SCRIPTSsrw/exsrw_nexus_gfs_sfc.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+  rm -rf ${DATA}
+fi
+date
+
diff --git a/jobs/JSRW_NEXUS_POST_SPLIT b/jobs/JSRW_NEXUS_POST_SPLIT new file mode 100755 index 0000000000..10f4101d5c --- /dev/null +++ b/jobs/JSRW_NEXUS_POST_SPLIT @@ -0,0 +1,165 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script generates the final NEXUS emission netCDF file.
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, pp.4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+for sect in user nco platform workflow global cpl_aqm_parm ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that generates the final NEXUS
+emission file for FV3 (in netCDF format).
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+  if [ "${SCHED}" = "slurm" ]; then
+    job=${SLURM_JOB_NAME}
+    pid=${SLURM_JOB_ID}
+  elif [ "${SCHED}" = "pbspro" ]; then
+    job=${PBS_JOBNAME}
+    pid=${PBS_JOBID}
+  else
+    job="task"
+    pid=$$
+  fi
+  jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
+#
+#-----------------------------------------------------------------------
+#
+export pgmout="${DATA}/OUTPUT.$$"
+env
+
+${SCRIPTSsrw}/exsrw_nexus_post_split.sh
+export err=$?; err_chk
+
+if [ -e "$pgmout" ]; then
+  cat $pgmout
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Whether or not working directory DATA should be kept.
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+  rm -rf ${DATA}
+fi
+date
+
diff --git a/jobs/JSRW_POINT_SOURCE b/jobs/JSRW_POINT_SOURCE new file mode 100755 index 0000000000..6218acaa99 --- /dev/null +++ b/jobs/JSRW_POINT_SOURCE @@ -0,0 +1,162 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script generates the POINT SOURCE EMISSION file.
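#
# --- Illustrative sketch (not part of the change above) ---
# The ensemble-suffix logic these J-jobs share, runnable standalone with a
# stub boolify (the real one lives in the app's ush utilities). Note that
# quoting ${ENSMEM_INDX} matters here: with an unquoted empty value the test
# collapses to `[ ! -z ]`, which is true.
#
boolify() { case "${1^^}" in TRUE|YES|1) echo "TRUE" ;; *) echo "FALSE" ;; esac; }  # stub
DO_ENSEMBLE="TRUE"; ENSMEM_INDX="001"               # hypothetical inputs
if [ "$(boolify "${DO_ENSEMBLE}")" = "TRUE" ] && [ -n "${ENSMEM_INDX}" ]; then
  dot_ensmem=".mem${ENSMEM_INDX}"
else
  dot_ensmem=""
fi
echo "dot_ensmem='${dot_ensmem}'"                   # -> dot_ensmem='.mem001'
#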
+# +#----------------------------------------------------------------------- +# +date +export PS4='+ $SECONDS + ' +set -xue +# +#----------------------------------------------------------------------- +# +# Set the NCO standard environment variables (Table 1, pp.4) +# +#----------------------------------------------------------------------- +# +export USHsrw="${HOMEdir}/ush" +export EXECsrw="${HOMEdir}/exec" +export PARMsrw="${HOMEdir}/parm" +export SCRIPTSsrw="${HOMEdir}/scripts" +# +#----------------------------------------------------------------------- +# +# Source the variable definitions file and the bash utility functions. +# +#----------------------------------------------------------------------- +# +export USHdir="${USHsrw}" # should be removed later +. ${USHsrw}/source_util_funcs.sh +for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm task_point_source \ + task_run_fcst ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " +======================================================================== +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + +This is the J-job script for the task that generates the point source files. +========================================================================" +# +#----------------------------------------------------------------------- +# +# Define job and jobid by default for rocoto +# +#----------------------------------------------------------------------- +# +WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}" +if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + if [ "${SCHED}" = "slurm" ]; then + job=${SLURM_JOB_NAME} + pid=${SLURM_JOB_ID} + elif [ "${SCHED}" = "pbspro" ]; then + job=${PBS_JOBNAME} + pid=${PBS_JOBID} + else + job="task" + pid=$$ + fi + jobid="${job}.${PDY}${cyc}.${pid}" +fi +# +#----------------------------------------------------------------------- +# +# Create a temp working directory (DATA) and cd into it. +# +#----------------------------------------------------------------------- +# +export DATA="${DATA:-${DATAROOT}/${jobid}}" +mkdir -p $DATA +cd $DATA +# +#----------------------------------------------------------------------- +# +# Define NCO environment variables and set COM type definitions. 
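#
# --- Illustrative sketch (not part of the change above) ---
# Several of these J-jobs list a section more than once in the source_yaml
# loop (nco and cpl_aqm_parm each appear twice); sourcing a section twice is
# harmless but wasteful. A defensive variant of the loop, with source_yaml
# stubbed and a hypothetical defns path so the snippet runs standalone:
#
source_yaml() { echo "sourcing section '$2' from '$1'"; }   # stub for illustration
GLOBAL_VAR_DEFNS_FP="/path/to/var_defns.yaml"               # hypothetical path
declare -A seen=()
for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm task_point_source; do
  [ -n "${seen[$sect]:-}" ] && continue    # skip sections already sourced
  seen[$sect]=1
  source_yaml "${GLOBAL_VAR_DEFNS_FP}" "${sect}"
done
#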
+# +#----------------------------------------------------------------------- +# +export NET="${NET:-${NET_default}}" +export RUN="${RUN:-${RUN_default}}" + +[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT +if [ "${MACHINE}" = "WCOSS2" ]; then + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" +else + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" +fi + +mkdir -p ${COMOUT} + +# Run setpdy to initialize PDYm and PDYp variables +export cycle="${cycle:-t${cyc}z}" +setpdy.sh +. ./PDY +# +#----------------------------------------------------------------------- +# +# Set sub-cycle and ensemble member names in file/diectory names +# +#----------------------------------------------------------------------- +# +if [ ${subcyc} -ne 0 ]; then + export cycle="t${cyc}${subcyc}z" +fi +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then + export dot_ensmem=".mem${ENSMEM_INDX}" +else + export dot_ensmem= +fi +# +# +#----------------------------------------------------------------------- +# +# Call the ex-script for this J-job. +# +#----------------------------------------------------------------------- +# +export pgmout="${DATA}/OUTPUT.$$" +env + +$SCRIPTSsrw/exsrw_point_source.sh +export err=$?; err_chk + +if [ -e "$pgmout" ]; then + cat $pgmout +fi +# +#----------------------------------------------------------------------- +# +# Whether or not working directory DATA should be kept. +# +#----------------------------------------------------------------------- +# +if [ "${KEEPDATA}" = "NO" ]; then + rm -rf ${DATA} +fi +date + diff --git a/jobs/JSRW_POST_STAT_O3 b/jobs/JSRW_POST_STAT_O3 new file mode 100755 index 0000000000..5fadd70d30 --- /dev/null +++ b/jobs/JSRW_POST_STAT_O3 @@ -0,0 +1,163 @@ +#!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# This script runs POST-STAT-O3. +# +#----------------------------------------------------------------------- +# +date +export PS4='+ $SECONDS + ' +set -xue +# +#----------------------------------------------------------------------- +# +# Set the NCO standard environment variables (Table 1, pp.4) +# +#----------------------------------------------------------------------- +# +export USHsrw="${HOMEdir}/ush" +export EXECsrw="${HOMEdir}/exec" +export PARMsrw="${HOMEdir}/parm" +export SCRIPTSsrw="${HOMEdir}/scripts" +# +#----------------------------------------------------------------------- +# +# Source the variable definitions file and the bash utility functions. +# +#----------------------------------------------------------------------- +# +export USHdir="${USHsrw}" # should be removed later +. ${USHsrw}/source_util_funcs.sh +for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done +. $USHdir/job_preamble.sh +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
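#
# --- Illustrative sketch (not part of the change above) ---
# What the non-WCOSS2 branch of the COMIN/COMOUT block above expands to; on
# WCOSS2 the same path is resolved through prod_util's compath.py. All values
# below are hypothetical.
#
COMROOT="/tmp/com"; NET="aqm"; model_ver="v1.0.0"
RUN="aqm"; PDY="20230615"; cyc="12"; SLASH_ENSMEM_SUBDIR=""
COMOUT="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}"
echo "${COMOUT}"   # -> /tmp/com/aqm/v1.0.0/aqm.20230615/12
#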
+# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " +======================================================================== +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + +This is the J-job script for the task that runs POST-STAT-O3. +========================================================================" +# +#----------------------------------------------------------------------- +# +# Define job and jobid by default for rocoto +# +#----------------------------------------------------------------------- +# +WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}" +if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then + if [ "${SCHED}" = "slurm" ]; then + job=${SLURM_JOB_NAME} + pid=${SLURM_JOB_ID} + elif [ "${SCHED}" = "pbspro" ]; then + job=${PBS_JOBNAME} + pid=${PBS_JOBID} + else + job="task" + pid=$$ + fi + jobid="${job}.${PDY}${cyc}.${pid}" +fi +# +#----------------------------------------------------------------------- +# +# Create a temp working directory (DATA) and cd into it. +# +#----------------------------------------------------------------------- +# +export DATA="${DATA:-${DATAROOT}/${jobid}}" +mkdir -p $DATA +cd $DATA +# +#----------------------------------------------------------------------- +# +# Define NCO environment variables and set COM type definitions. +# +#----------------------------------------------------------------------- +# +export NET="${NET:-${NET_default}}" +export RUN="${RUN:-${RUN_default}}" + +[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT +if [ "${MACHINE}" = "WCOSS2" ]; then + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" +else + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" +fi + +mkdir -p ${COMOUT} + +# Run setpdy to initialize PDYm and PDYp variables +export cycle="${cycle:-t${cyc}z}" +setpdy.sh +. ./PDY +# +#----------------------------------------------------------------------- +# +# Set sub-cycle and ensemble member names in file/diectory names +# +#----------------------------------------------------------------------- +# +if [ ${subcyc} -ne 0 ]; then + export cycle="t${cyc}${subcyc}z" +fi +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then + export dot_ensmem=".mem${ENSMEM_INDX}" +else + export dot_ensmem= +fi +# +#----------------------------------------------------------------------- +# +# Call the ex-script for this J-job. +# +#----------------------------------------------------------------------- +# +export pgmout="${DATA}/OUTPUT.$$" +env + +${SCRIPTSsrw}/exsrw_post_stat_o3.sh +export err=$?; err_chk + +if [ -e "$pgmout" ]; then + cat $pgmout +fi +# +#----------------------------------------------------------------------- +# +# Whether or not working directory DATA should be kept. 
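#
# --- Illustrative sketch (not part of the change above) ---
# err_chk is provided by NCEP prod_util; a minimal stand-in with the same
# abort-on-nonzero contract could look like this when exercising these J-jobs
# outside an NCO environment (a sketch, not the operational tool).
#
err_chk() {
  if [ "${err:-0}" -ne 0 ]; then
    echo "FATAL ERROR: ex-script exited with status ${err}" >&2
    exit "${err}"
  fi
}
false; export err=$?   # simulate a failing ex-script
err_chk                # terminates the job with status 1
#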
+#
+#-----------------------------------------------------------------------
+#
+if [ "${KEEPDATA}" = "NO" ]; then
+  rm -rf ${DATA}
+fi
+date
+
+
diff --git a/jobs/JSRW_POST_STAT_PM25 b/jobs/JSRW_POST_STAT_PM25 new file mode 100755 index 0000000000..2d7d6e9e88 --- /dev/null +++ b/jobs/JSRW_POST_STAT_PM25 @@ -0,0 +1,161 @@
+#!/usr/bin/env bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script runs POST-STAT-PM25.
+#
+#-----------------------------------------------------------------------
+#
+date
+export PS4='+ $SECONDS + '
+set -xue
+#
+#-----------------------------------------------------------------------
+#
+# Set the NCO standard environment variables (Table 1, pp.4)
+#
+#-----------------------------------------------------------------------
+#
+export USHsrw="${HOMEdir}/ush"
+export EXECsrw="${HOMEdir}/exec"
+export PARMsrw="${HOMEdir}/parm"
+export SCRIPTSsrw="${HOMEdir}/scripts"
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+export USHdir="${USHsrw}" # should be removed later
+. ${USHsrw}/source_util_funcs.sh
+for sect in user nco platform workflow global cpl_aqm_parm task_run_post ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that runs POST-STAT-PM25.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+  if [ "${SCHED}" = "slurm" ]; then
+    job=${SLURM_JOB_NAME}
+    pid=${SLURM_JOB_ID}
+  elif [ "${SCHED}" = "pbspro" ]; then
+    job=${PBS_JOBNAME}
+    pid=${PBS_JOBID}
+  else
+    job="task"
+    pid=$$
+  fi
+  jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
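#
# --- Illustrative sketch (not part of the change above) ---
# How jobid and DATA resolve in the no-scheduler branch of the block above;
# under Slurm or PBS Pro the name and id come from SLURM_JOB_NAME/SLURM_JOB_ID
# or PBS_JOBNAME/PBS_JOBID instead. DATAROOT and the cycle values are
# hypothetical.
#
PDY="20230615"; cyc="12"; DATAROOT="/tmp/stmp"   # hypothetical values
job="task"; pid=$$
jobid="${job}.${PDY}${cyc}.${pid}"
DATA="${DATAROOT}/${jobid}"
mkdir -p "${DATA}" && echo "working directory: ${DATA}"
#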
+# +#----------------------------------------------------------------------- +# +export NET="${NET:-${NET_default}}" +export RUN="${RUN:-${RUN_default}}" + +[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT +if [ "${MACHINE}" = "WCOSS2" ]; then + export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" + export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}" +else + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}" +fi + +mkdir -p ${COMOUT} + +# Run setpdy to initialize PDYm and PDYp variables +export cycle="${cycle:-t${cyc}z}" +setpdy.sh +. ./PDY +# +#----------------------------------------------------------------------- +# +# Set sub-cycle and ensemble member names in file/diectory names +# +#----------------------------------------------------------------------- +# +if [ ${subcyc} -ne 0 ]; then + export cycle="t${cyc}${subcyc}z" +fi +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then + export dot_ensmem=".mem${ENSMEM_INDX}" +else + export dot_ensmem= +fi +# +#----------------------------------------------------------------------- +# +# Call the ex-script for this J-job. +# +#----------------------------------------------------------------------- +# +export pgmout="${DATA}/OUTPUT.$$" +env + +${SCRIPTSsrw}/exsrw_post_stat_pm25.sh +export err=$?; err_chk + +if [ -e "$pgmout" ]; then + cat $pgmout +fi +# +#----------------------------------------------------------------------- +# +# Whether or not working directory DATA should be kept. +# +#----------------------------------------------------------------------- +# +if [ "${KEEPDATA}" = "NO" ]; then + rm -rf ${DATA} +fi +date + diff --git a/jobs/JSRW_PRE_POST_STAT b/jobs/JSRW_PRE_POST_STAT new file mode 100755 index 0000000000..8c51e18510 --- /dev/null +++ b/jobs/JSRW_PRE_POST_STAT @@ -0,0 +1,175 @@ +#!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# This script runs PRE-POST-STAT. +# +#----------------------------------------------------------------------- +# +date +export PS4='+ $SECONDS + ' +set -xue +# +#----------------------------------------------------------------------- +# +# Set the NCO standard environment variables (Table 1, pp.4) +# +#----------------------------------------------------------------------- +# +export USHsrw="${HOMEdir}/ush" +export EXECsrw="${HOMEdir}/exec" +export PARMsrw="${HOMEdir}/parm" +export SCRIPTSsrw="${HOMEdir}/scripts" +# +#----------------------------------------------------------------------- +# +# Source the variable definitions file and the bash utility functions. +# +#----------------------------------------------------------------------- +# +export USHdir="${USHsrw}" # should be removed later +. ${USHsrw}/source_util_funcs.sh +for sect in user nco platform workflow nco global cpl_aqm_parm cpl_aqm_parm ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
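#
# --- Illustrative sketch (not part of the change above) ---
# What the scrfunc_* assignments compute, calling readlink -f directly
# (the J-jobs go through the $READLINK variable, presumably so platforms can
# substitute a GNU readlink):
#
scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" )
scrfunc_fn=$( basename "${scrfunc_fp}" )
scrfunc_dir=$( dirname "${scrfunc_fp}" )
echo "running ${scrfunc_fn} from ${scrfunc_dir}"
#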
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that runs PRE-POST-STAT.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Define job and jobid by default for rocoto
+#
+#-----------------------------------------------------------------------
+#
+WORKFLOW_MANAGER="${WORKFLOW_MANAGER:-rocoto}"
+if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then
+  if [ "${SCHED}" = "slurm" ]; then
+    job=${SLURM_JOB_NAME}
+    pid=${SLURM_JOB_ID}
+  elif [ "${SCHED}" = "pbspro" ]; then
+    job=${PBS_JOBNAME}
+    pid=${PBS_JOBID}
+  else
+    job="task"
+    pid=$$
+  fi
+  jobid="${job}.${PDY}${cyc}.${pid}"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Create a temp working directory (DATA) and cd into it.
+#
+#-----------------------------------------------------------------------
+#
+export DATA="${DATA:-${DATAROOT}/${jobid}}"
+mkdir -p $DATA
+cd $DATA
+#
+#-----------------------------------------------------------------------
+#
+# Define NCO environment variables and set COM type definitions.
+#
+#-----------------------------------------------------------------------
+#
+export NET="${NET:-${NET_default}}"
+export RUN="${RUN:-${RUN_default}}"
+
+[[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT
+if [ "${MACHINE}" = "WCOSS2" ]; then
+  export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+  export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR})}"
+else
+  export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+  export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}}"
+fi
+
+mkdir -p ${COMOUT}
+
+# Create a temporary share directory
+export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}"
+mkdir -p ${DATA_SHARE}
+
+# Run setpdy to initialize PDYm and PDYp variables
+export cycle="${cycle:-t${cyc}z}"
+setpdy.sh
+. ./PDY
+#
+#-----------------------------------------------------------------------
+#
+# Set sub-cycle and ensemble member names in file/directory names
+#
+#-----------------------------------------------------------------------
+#
+if [ ${subcyc} -ne 0 ]; then
+  export cycle="t${cyc}${subcyc}z"
+fi
+if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z ${ENSMEM_INDX} ]; then
+  export dot_ensmem=".mem${ENSMEM_INDX}"
+else
+  export dot_ensmem=
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job.
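#
# --- Illustrative sketch (not part of the change above) ---
# How the cycle string changes when a sub-cycle is in effect; subcyc is
# assumed to already be a zero-padded minute string, as elsewhere in these
# J-jobs. Sample values are hypothetical.
#
cyc="12"; subcyc="30"
cycle="t${cyc}z"
if [ "${subcyc}" -ne 0 ]; then
  cycle="t${cyc}${subcyc}z"
fi
echo "${cycle}"   # -> t1230z
#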
+# +#----------------------------------------------------------------------- +# +export pgmout="${DATA}/OUTPUT.$$" +env + +${SCRIPTSsrw}/exsrw_pre_post_stat.sh +export err=$?; err_chk + +if [ -e "$pgmout" ]; then + cat $pgmout +fi +# +#----------------------------------------------------------------------- +# +# Remove post_complete flag file. +# +#----------------------------------------------------------------------- +# +post_complete_file="${DATA_SHARE}/post_${PDY}${cyc}_task_complete.txt" +if [ -f ${post_complete_file} ] ; then + rm -f ${post_complete_file} +fi +# +#----------------------------------------------------------------------- +# +# Whether or not working directory DATA should be kept. +# +#----------------------------------------------------------------------- +# +if [ "${KEEPDATA}" = "NO" ]; then + rm -rf ${DATA} +fi +date + diff --git a/manage_externals/checkout_externals b/manage_externals/checkout_externals index a0698baef0..48bce24010 100755 --- a/manage_externals/checkout_externals +++ b/manage_externals/checkout_externals @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """Main driver wrapper around the manic/checkout utility. diff --git a/modulefiles/build_derecho_intel.lua b/modulefiles/build_derecho_intel.lua index d434e8ecd3..491a94f912 100644 --- a/modulefiles/build_derecho_intel.lua +++ b/modulefiles/build_derecho_intel.lua @@ -6,13 +6,15 @@ the CISL machine Derecho (Cray) using Intel@2021.10.0 whatis([===[Loads libraries needed for building the UFS SRW App on Derecho ]===]) prepend_path("MODULEPATH","/lustre/desc1/scratch/epicufsrt/contrib/modulefiles_extra") -prepend_path("MODULEPATH", "/glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.5.0/envs/unified-env/install/modulefiles/Core") +prepend_path("MODULEPATH", "/glade/work/epicufsrt/contrib/spack-stack/derecho/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core") load(pathJoin("stack-intel", os.getenv("stack_intel_ver") or "2021.10.0")) load(pathJoin("stack-cray-mpich", os.getenv("stack_cray_mpich_ver") or "8.1.25")) -load(pathJoin("cmake", os.getenv("cmake_ver") or "3.26.3")) +load(pathJoin("cmake", os.getenv("cmake_ver") or "3.23.1")) load("srw_common") +load(pathJoin("prod_util", os.getenv("prod_util_ver") or "2.1.1")) + setenv("CMAKE_Platform","derecho.intel") diff --git a/modulefiles/build_gaea-c5_intel.lua b/modulefiles/build_gaea_intel.lua similarity index 90% rename from modulefiles/build_gaea-c5_intel.lua rename to modulefiles/build_gaea_intel.lua index ecf21dcc8d..b47209194c 100644 --- a/modulefiles/build_gaea-c5_intel.lua +++ b/modulefiles/build_gaea_intel.lua @@ -12,7 +12,7 @@ load(pathJoin("stack-intel", stack_intel_ver)) stack_mpich_ver=os.getenv("stack_mpich_ver") or "8.1.25" load(pathJoin("stack-cray-mpich", stack_mpich_ver)) -stack_python_ver=os.getenv("stack_python_ver") or "3.10.8" +stack_python_ver=os.getenv("stack_python_ver") or "3.10.13" load(pathJoin("stack-python", stack_python_ver)) cmake_ver=os.getenv("cmake_ver") or "3.23.1" @@ -32,4 +32,4 @@ setenv("CXX","CC") setenv("CMAKE_C_COMPILER","cc") setenv("CMAKE_Fortran_COMPILER","ftn") setenv("CMAKE_CXX_COMPILER","CC") -setenv("CMAKE_Platform","gaea-c5.intel") +setenv("CMAKE_Platform","gaea.intel") diff --git a/modulefiles/build_hera_gnu.lua b/modulefiles/build_hera_gnu.lua index c1f57e2115..8854108966 100644 --- a/modulefiles/build_hera_gnu.lua +++ b/modulefiles/build_hera_gnu.lua @@ -1,25 +1,30 @@ help([[ This module loads libraries for building the UFS SRW App on -the NOAA RDHPC machine Hera using 
GNU 9.2.0 +the NOAA RDHPC machine Hera using GNU 13.3.0 ]]) -whatis([===[Loads libraries needed for building the UFS SRW App on Hera using GNU 9.2.0 ]===]) +whatis([===[Loads libraries needed for building the UFS SRW App on Hera using GNU 13.3.0 ]===]) -prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env-noavx512/install/modulefiles/Core") -prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulefiles") +prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/installs/gnu/modulefiles") +prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/installs/openmpi/modulefiles") +prepend_path("MODULEPATH", "/scratch2/NCEPDEV/stmp1/role.epic/spack-stack/spack-stack-1.6.0_gnu13/envs/ufs-wm-srw-rocky8/install/modulefiles/Core") -load("stack-gcc/9.2.0") -load("stack-openmpi/4.1.5") -load("stack-python/3.10.8") +load("stack-gcc/13.3.0") +load("stack-openmpi/4.1.6") +load("stack-python/3.10.13") load("cmake/3.23.1") load("srw_common") load(pathJoin("nccmp", os.getenv("nccmp_ver") or "1.9.0.1")) -load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6")) -load(pathJoin("openblas", os.getenv("openblas_ver") or "0.3.19")) +load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6")) +load(pathJoin("openblas", os.getenv("openblas_ver") or "0.3.24")) -setenv("CMAKE_C_COMPILER","mpicc") -setenv("CMAKE_CXX_COMPILER","mpicxx") -setenv("CMAKE_Fortran_COMPILER","mpif90") -setenv("CMAKE_Platform","hera.gnu") +prepend_path("CPPFLAGS", " -I/apps/slurm_hera/23.11.3/include/slurm"," ") +prepend_path("LD_LIBRARY_PATH", "/apps/slurm_hera/23.11.3/lib") +setenv("LD_PRELOAD", "/scratch2/NCEPDEV/stmp1/role.epic/installs/gnu/13.3.0/lib64/libstdc++.so.6") + +setenv("CC", "mpicc") +setenv("CXX", "mpic++") +setenv("FC", "mpif90") +setenv("CMAKE_Platform", "hera.gnu") diff --git a/modulefiles/build_hera_intel.lua b/modulefiles/build_hera_intel.lua index 500d410dc5..d8e793044c 100644 --- a/modulefiles/build_hera_intel.lua +++ b/modulefiles/build_hera_intel.lua @@ -8,8 +8,7 @@ whatis([===[Loads libraries needed for building the UFS SRW App on Hera ]===]) prepend_path("MODULEPATH","/contrib/sutils/modulefiles") load("sutils") -prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env-noavx512/install/modulefiles/Core") -prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulefiles") +prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/unified-env-rocky8/install/modulefiles/Core") stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0" load(pathJoin("stack-intel", stack_intel_ver)) @@ -17,16 +16,17 @@ load(pathJoin("stack-intel", stack_intel_ver)) stack_impi_ver=os.getenv("stack_impi_ver") or "2021.5.1" load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver)) -stack_python_ver=os.getenv("stack_python_ver") or "3.10.8" +stack_python_ver=os.getenv("stack_python_ver") or "3.10.13" load(pathJoin("stack-python", stack_python_ver)) -cmake_ver=os.getenv("cmake_ver") or "3.20.1" +cmake_ver=os.getenv("cmake_ver") or "3.23.1" load(pathJoin("cmake", cmake_ver)) load("srw_common") load(pathJoin("nccmp", os.getenv("nccmp_ver") or "1.9.0.1")) -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) +load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6")) +load(pathJoin("prod_util", os.getenv("prod_util_ver") or "2.1.1")) setenv("CMAKE_C_COMPILER","mpiicc") setenv("CMAKE_CXX_COMPILER","mpiicpc") diff --git a/modulefiles/build_hercules_intel.lua 
b/modulefiles/build_hercules_intel.lua index 1cb402b8a6..b65890f1c4 100644 --- a/modulefiles/build_hercules_intel.lua +++ b/modulefiles/build_hercules_intel.lua @@ -5,18 +5,18 @@ the MSU machine Hercules using intel-oneapi-compilers/2022.2.1 whatis([===[Loads libraries needed for building the UFS SRW App on Hercules ]===]) -prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.5.0/envs/unified-env/install/modulefiles/Core") -prepend_path("MODULEPATH", "/work/noaa/da/role-da/spack-stack/modulefiles") +prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core") load("stack-intel/2021.9.0") load("stack-intel-oneapi-mpi/2021.9.0") -load("stack-python/3.10.8") -load("cmake/3.26.3") +load("stack-python/3.10.13") +load("cmake/3.23.1") load("srw_common") load("nccmp/1.9.0.1") load("nco/5.0.6") +load(pathJoin("prod_util", os.getenv("prod_util_ver") or "2.1.1")) setenv("CFLAGS","-diag-disable=10441") setenv("FFLAGS","-diag-disable=10441") diff --git a/modulefiles/build_jet_intel.lua b/modulefiles/build_jet_intel.lua index e687531ac8..854b4404cb 100644 --- a/modulefiles/build_jet_intel.lua +++ b/modulefiles/build_jet_intel.lua @@ -5,17 +5,15 @@ the NOAA RDHPC machine Jet using Intel-2021.5.0 whatis([===[Loads libraries needed for building the UFS SRW App on Jet ]===]) -prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.5.0/envs/unified-env/install/modulefiles/Core") -prepend_path("MODULEPATH", "/lfs4/HFIP/hfv3gfs/spack-stack/modulefiles") +prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.6.0/envs/unified-env-rocky8/install/modulefiles/Core") load("stack-intel/2021.5.0") load("stack-intel-oneapi-mpi/2021.5.1") -load("stack-python/3.10.8") +load("stack-python/3.10.13") load("cmake/3.23.1") load("srw_common") -load("prod-util/1.2.2") load("nccmp/1.9.0.1") load("nco/5.0.6") diff --git a/modulefiles/build_noaacloud_intel.lua b/modulefiles/build_noaacloud_intel.lua index 50f1aec9ab..dd774e8ed9 100644 --- a/modulefiles/build_noaacloud_intel.lua +++ b/modulefiles/build_noaacloud_intel.lua @@ -5,7 +5,7 @@ the NOAA cloud using Intel-oneapi whatis([===[Loads libraries needed for building the UFS SRW App on NOAA cloud ]===]) -prepend_path("MODULEPATH", "/contrib/spack-stack/spack-stack-1.5.0/envs/unified-env/install/modulefiles/Core") +prepend_path("MODULEPATH", "/contrib/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core") prepend_path("MODULEPATH", "/apps/modules/modulefiles") prepend_path("PATH", "/contrib/EPIC/bin") load("stack-intel") diff --git a/modulefiles/build_orion_intel.lua b/modulefiles/build_orion_intel.lua index 241b658c88..b2f3d85c00 100644 --- a/modulefiles/build_orion_intel.lua +++ b/modulefiles/build_orion_intel.lua @@ -1,23 +1,25 @@ help([[ This module loads libraries for building the UFS SRW App on -the MSU machine Orion using Intel-2022.1.2 +the MSU machine Orion using intel-oneapi-compilers/2021.9.0 ]]) whatis([===[Loads libraries needed for building the UFS SRW App on Orion ]===]) -prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.5.0/envs/unified-env/install/modulefiles/Core") -prepend_path("MODULEPATH", "/work/noaa/da/role-da/spack-stack/modulefiles") +prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/unified-env-rocky9/install/modulefiles/Core") -load("stack-intel/2022.0.2") 
-load("stack-intel-oneapi-mpi/2021.5.1") -load("stack-python/3.10.8") -load("cmake/3.22.1") +load("stack-intel/2021.9.0") +load("stack-intel-oneapi-mpi/2021.9.0") +load("stack-python/3.10.13") +load("cmake/3.23.1") load("srw_common") load("nccmp/1.9.0.1") load("nco/5.0.6") -load("wget") +load(pathJoin("prod_util", os.getenv("prod_util_ver") or "2.1.1")) + +setenv("CFLAGS","-diag-disable=10441") +setenv("FFLAGS","-diag-disable=10441") setenv("CMAKE_C_COMPILER","mpiicc") setenv("CMAKE_CXX_COMPILER","mpiicpc") diff --git a/modulefiles/python_srw_cmaq.lua b/modulefiles/python_srw_aqm.lua similarity index 100% rename from modulefiles/python_srw_cmaq.lua rename to modulefiles/python_srw_aqm.lua diff --git a/modulefiles/srw_common.lua b/modulefiles/srw_common.lua index 2bcbc1f5f7..cb2047cbe1 100644 --- a/modulefiles/srw_common.lua +++ b/modulefiles/srw_common.lua @@ -3,21 +3,21 @@ load("zlib/1.2.13") load("libpng/1.6.37") load("netcdf-c/4.9.2") -load("netcdf-fortran/4.6.0") +load("netcdf-fortran/4.6.1") load("parallelio/2.5.10") -load("esmf/8.4.2") -load("fms/2023.01") +load("esmf/8.6.0") +load("fms/2023.04") load("bacio/2.4.1") -load("crtm/2.4.0") +load("crtm/2.4.0.1") load("g2/3.4.5") load("g2tmpl/1.10.2") load("ip/4.3.0") -load("sp/2.3.3") +load("sp/2.5.0") load("w3emc/2.10.0") -load("gftl-shared/1.5.0") -load("mapl/2.35.2-esmf-8.4.2") +load("gftl-shared/1.6.1") +load("mapl/2.40.3-esmf-8.6.0") load("nemsio/2.5.4") load("sfcio/1.4.1") diff --git a/modulefiles/tasks/cheyenne/aqm_ics.local.lua b/modulefiles/tasks/cheyenne/aqm_ics.local.lua index 1040aab9a6..9c9f0ca3d5 100644 --- a/modulefiles/tasks/cheyenne/aqm_ics.local.lua +++ b/modulefiles/tasks/cheyenne/aqm_ics.local.lua @@ -1,3 +1,3 @@ load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0")) load("nco/4.9.5") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua b/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua index 1040aab9a6..9c9f0ca3d5 100644 --- a/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua +++ b/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua @@ -1,3 +1,3 @@ load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0")) load("nco/4.9.5") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/fire_emission.local.lua b/modulefiles/tasks/cheyenne/fire_emission.local.lua index b62670156f..86252a9a4f 100644 --- a/modulefiles/tasks/cheyenne/fire_emission.local.lua +++ b/modulefiles/tasks/cheyenne/fire_emission.local.lua @@ -1,2 +1,2 @@ load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/nexus_emission.local.lua b/modulefiles/tasks/cheyenne/nexus_emission.local.lua index c46ead59a9..3c690fa12a 100644 --- a/modulefiles/tasks/cheyenne/nexus_emission.local.lua +++ b/modulefiles/tasks/cheyenne/nexus_emission.local.lua @@ -2,4 +2,4 @@ load("nco/4.9.5") load("mpt/2.25") load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua b/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua index b62670156f..86252a9a4f 100644 --- a/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua +++ b/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua @@ -1,2 +1,2 @@ load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/nexus_post_split.local.lua b/modulefiles/tasks/cheyenne/nexus_post_split.local.lua index c957eff552..e3f4bbe95d 100644 --- a/modulefiles/tasks/cheyenne/nexus_post_split.local.lua +++ 
b/modulefiles/tasks/cheyenne/nexus_post_split.local.lua @@ -1,3 +1,3 @@ load(pathJoin("nco", os.getenv("nco_ver") or "4.9.5")) load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/plot_allvars.local.lua b/modulefiles/tasks/cheyenne/plot_allvars.local.lua index b49b8bb863..7cee04231e 100644 --- a/modulefiles/tasks/cheyenne/plot_allvars.local.lua +++ b/modulefiles/tasks/cheyenne/plot_allvars.local.lua @@ -1,3 +1,3 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/cheyenne/point_source.local.lua b/modulefiles/tasks/cheyenne/point_source.local.lua index b62670156f..86252a9a4f 100644 --- a/modulefiles/tasks/cheyenne/point_source.local.lua +++ b/modulefiles/tasks/cheyenne/point_source.local.lua @@ -1,2 +1,2 @@ load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/cheyenne/pre_post_stat.local.lua b/modulefiles/tasks/cheyenne/pre_post_stat.local.lua index 7dcdc5969b..042eb2f732 100644 --- a/modulefiles/tasks/cheyenne/pre_post_stat.local.lua +++ b/modulefiles/tasks/cheyenne/pre_post_stat.local.lua @@ -1,2 +1,2 @@ load("nco/4.9.5") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/aqm_ics.local.lua b/modulefiles/tasks/derecho/aqm_ics.local.lua index 26b28db2c5..9b519c10f6 100644 --- a/modulefiles/tasks/derecho/aqm_ics.local.lua +++ b/modulefiles/tasks/derecho/aqm_ics.local.lua @@ -1,2 +1,2 @@ -load("nco/5.0.6") -load("python_srw_cmaq") +load("nco/5.1.9") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/aqm_lbcs.local.lua b/modulefiles/tasks/derecho/aqm_lbcs.local.lua index 26b28db2c5..9b519c10f6 100644 --- a/modulefiles/tasks/derecho/aqm_lbcs.local.lua +++ b/modulefiles/tasks/derecho/aqm_lbcs.local.lua @@ -1,2 +1,2 @@ -load("nco/5.0.6") -load("python_srw_cmaq") +load("nco/5.1.9") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/fire_emission.local.lua b/modulefiles/tasks/derecho/fire_emission.local.lua index b62670156f..df0e35d5da 100644 --- a/modulefiles/tasks/derecho/fire_emission.local.lua +++ b/modulefiles/tasks/derecho/fire_emission.local.lua @@ -1,2 +1 @@ -load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/nexus_emission.local.lua b/modulefiles/tasks/derecho/nexus_emission.local.lua index 09f38a17dd..9b519c10f6 100644 --- a/modulefiles/tasks/derecho/nexus_emission.local.lua +++ b/modulefiles/tasks/derecho/nexus_emission.local.lua @@ -1,4 +1,2 @@ -load("nco/5.0.6") - -load("ncarenv") -load("python_srw_cmaq") +load("nco/5.1.9") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua b/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua index b62670156f..df0e35d5da 100644 --- a/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua +++ b/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua @@ -1,2 +1 @@ -load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/nexus_post_split.local.lua b/modulefiles/tasks/derecho/nexus_post_split.local.lua index a03758c9c6..9b519c10f6 100644 --- a/modulefiles/tasks/derecho/nexus_post_split.local.lua +++ b/modulefiles/tasks/derecho/nexus_post_split.local.lua @@ -1,3 +1,2 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6")) -load("ncarenv") -load("python_srw_cmaq") +load("nco/5.1.9") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/plot_allvars.local.lua 
b/modulefiles/tasks/derecho/plot_allvars.local.lua index b49b8bb863..7cee04231e 100644 --- a/modulefiles/tasks/derecho/plot_allvars.local.lua +++ b/modulefiles/tasks/derecho/plot_allvars.local.lua @@ -1,3 +1,3 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/derecho/point_source.local.lua b/modulefiles/tasks/derecho/point_source.local.lua index b62670156f..df0e35d5da 100644 --- a/modulefiles/tasks/derecho/point_source.local.lua +++ b/modulefiles/tasks/derecho/point_source.local.lua @@ -1,2 +1 @@ -load("ncarenv") -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/pre_post_stat.local.lua b/modulefiles/tasks/derecho/pre_post_stat.local.lua index 26b28db2c5..9b519c10f6 100644 --- a/modulefiles/tasks/derecho/pre_post_stat.local.lua +++ b/modulefiles/tasks/derecho/pre_post_stat.local.lua @@ -1,2 +1,2 @@ -load("nco/5.0.6") -load("python_srw_cmaq") +load("nco/5.1.9") +load("python_srw_aqm") diff --git a/modulefiles/tasks/derecho/python_srw.lua b/modulefiles/tasks/derecho/python_srw.lua deleted file mode 100644 index fe6c73a7d5..0000000000 --- a/modulefiles/tasks/derecho/python_srw.lua +++ /dev/null @@ -1,3 +0,0 @@ -unload("python") -load("conda") -setenv("SRW_ENV", "srw_app") diff --git a/modulefiles/tasks/gaea-c5/plot_allvars.local.lua b/modulefiles/tasks/gaea-c5/plot_allvars.local.lua deleted file mode 100644 index 624b869bdb..0000000000 --- a/modulefiles/tasks/gaea-c5/plot_allvars.local.lua +++ /dev/null @@ -1,6 +0,0 @@ -unload("miniconda3") -unload("python") -prepend_path("MODULEPATH","/ncrc/proj/epic/miniconda3/modulefiles") -load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) - -setenv("SRW_ENV", "regional_workflow") diff --git a/modulefiles/tasks/gaea-c5/python_srw.lua b/modulefiles/tasks/gaea-c5/python_srw.lua deleted file mode 100644 index b6107cc465..0000000000 --- a/modulefiles/tasks/gaea-c5/python_srw.lua +++ /dev/null @@ -1,8 +0,0 @@ -unload("miniconda3") -unload("python") -prepend_path("MODULEPATH","/ncrc/proj/epic/miniconda3/modulefiles") -load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) - -setenv("SRW_ENV", "workflow_tools") - -load("darshan-runtime/3.4.0") diff --git a/modulefiles/tasks/gaea/plot_allvars.local.lua b/modulefiles/tasks/gaea/plot_allvars.local.lua new file mode 100644 index 0000000000..41da34ecca --- /dev/null +++ b/modulefiles/tasks/gaea/plot_allvars.local.lua @@ -0,0 +1,4 @@ +unload("python") +load("conda") + +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/gaea/python_srw.lua b/modulefiles/tasks/gaea/python_srw.lua new file mode 100644 index 0000000000..5058b3f615 --- /dev/null +++ b/modulefiles/tasks/gaea/python_srw.lua @@ -0,0 +1,7 @@ +load("darshan-runtime/3.4.0") +unload("python") +load("conda") + +setenv("SRW_ENV", "srw_app") +setenv("LD_PRELOAD", "/opt/cray/pe/gcc/12.2.0/snos/lib64/libstdc++.so.6") + diff --git a/modulefiles/tasks/gaea-c5/run_vx.local.lua b/modulefiles/tasks/gaea/run_vx.local.lua similarity index 87% rename from modulefiles/tasks/gaea-c5/run_vx.local.lua rename to modulefiles/tasks/gaea/run_vx.local.lua index cb64e9a38a..57cdfbb1cc 100644 --- a/modulefiles/tasks/gaea-c5/run_vx.local.lua +++ b/modulefiles/tasks/gaea/run_vx.local.lua @@ -22,4 +22,6 @@ if (mode() == "unload") then unload(pathJoin("met", met_ver)) unload(pathJoin("metplus",metplus_ver)) end -load("python_srw") +load("conda") +setenv("SRW_ENV", "srw_app") +setenv("LD_PRELOAD", 
"/opt/cray/pe/gcc/12.2.0/snos/lib64/libstdc++.so.6") diff --git a/modulefiles/tasks/hera/aqm_ics.local.lua b/modulefiles/tasks/hera/aqm_ics.local.lua index 0e7132d749..2eb2ea2ee0 100644 --- a/modulefiles/tasks/hera/aqm_ics.local.lua +++ b/modulefiles/tasks/hera/aqm_ics.local.lua @@ -1,2 +1,2 @@ -load("python_srw_cmaq") -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) +load("python_srw_aqm") +load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6")) diff --git a/modulefiles/tasks/hera/aqm_lbcs.local.lua b/modulefiles/tasks/hera/aqm_lbcs.local.lua index 5a7b0cece6..2eb2ea2ee0 100644 --- a/modulefiles/tasks/hera/aqm_lbcs.local.lua +++ b/modulefiles/tasks/hera/aqm_lbcs.local.lua @@ -1,2 +1,2 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("miniconda_regional_workflow_cmaq") +load("python_srw_aqm") +load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6")) diff --git a/modulefiles/tasks/hera/fire_emission.local.lua b/modulefiles/tasks/hera/fire_emission.local.lua index 8aa737aa65..68d6f14832 100644 --- a/modulefiles/tasks/hera/fire_emission.local.lua +++ b/modulefiles/tasks/hera/fire_emission.local.lua @@ -1,3 +1,3 @@ load("hpss") -load("python_srw_cmaq") -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) +load("python_srw_aqm") +load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6")) diff --git a/modulefiles/tasks/hera/nexus_emission.local.lua b/modulefiles/tasks/hera/nexus_emission.local.lua index c7ac9dcb90..d1f95e6d31 100644 --- a/modulefiles/tasks/hera/nexus_emission.local.lua +++ b/modulefiles/tasks/hera/nexus_emission.local.lua @@ -1,2 +1,2 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6")) +load("python_srw_aqm") diff --git a/modulefiles/tasks/hera/nexus_post_split.local.lua b/modulefiles/tasks/hera/nexus_post_split.local.lua index 0e7132d749..2eb2ea2ee0 100644 --- a/modulefiles/tasks/hera/nexus_post_split.local.lua +++ b/modulefiles/tasks/hera/nexus_post_split.local.lua @@ -1,2 +1,2 @@ -load("python_srw_cmaq") -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) +load("python_srw_aqm") +load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6")) diff --git a/modulefiles/tasks/hera/plot_allvars.local.lua b/modulefiles/tasks/hera/plot_allvars.local.lua index b7e9528710..85291013c7 100644 --- a/modulefiles/tasks/hera/plot_allvars.local.lua +++ b/modulefiles/tasks/hera/plot_allvars.local.lua @@ -1,2 +1,2 @@ load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/hera/point_source.local.lua b/modulefiles/tasks/hera/point_source.local.lua index 89feda226c..df0e35d5da 100644 --- a/modulefiles/tasks/hera/point_source.local.lua +++ b/modulefiles/tasks/hera/point_source.local.lua @@ -1 +1 @@ -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/hera/pre_post_stat.local.lua b/modulefiles/tasks/hera/pre_post_stat.local.lua index 23370a8d60..ede4c61606 100644 --- a/modulefiles/tasks/hera/pre_post_stat.local.lua +++ b/modulefiles/tasks/hera/pre_post_stat.local.lua @@ -1 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) +load(pathJoin("nco", os.getenv("nco_ver") or "5.1.6")) diff --git a/modulefiles/tasks/hercules/aqm_ics.local.lua b/modulefiles/tasks/hercules/aqm_ics.local.lua index c7ac9dcb90..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/aqm_ics.local.lua +++ b/modulefiles/tasks/hercules/aqm_ics.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) 
-load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/aqm_lbcs.local.lua b/modulefiles/tasks/hercules/aqm_lbcs.local.lua index 5a7b0cece6..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/aqm_lbcs.local.lua +++ b/modulefiles/tasks/hercules/aqm_lbcs.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("miniconda_regional_workflow_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/fire_emission.local.lua b/modulefiles/tasks/hercules/fire_emission.local.lua index c7ac9dcb90..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/fire_emission.local.lua +++ b/modulefiles/tasks/hercules/fire_emission.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/nexus_emission.local.lua b/modulefiles/tasks/hercules/nexus_emission.local.lua index c7ac9dcb90..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/nexus_emission.local.lua +++ b/modulefiles/tasks/hercules/nexus_emission.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/nexus_post_split.local.lua b/modulefiles/tasks/hercules/nexus_post_split.local.lua index c7ac9dcb90..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/nexus_post_split.local.lua +++ b/modulefiles/tasks/hercules/nexus_post_split.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/plot_allvars.local.lua b/modulefiles/tasks/hercules/plot_allvars.local.lua index b49b8bb863..7cee04231e 100644 --- a/modulefiles/tasks/hercules/plot_allvars.local.lua +++ b/modulefiles/tasks/hercules/plot_allvars.local.lua @@ -1,3 +1,3 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/hercules/point_source.local.lua b/modulefiles/tasks/hercules/point_source.local.lua index 89feda226c..df0e35d5da 100644 --- a/modulefiles/tasks/hercules/point_source.local.lua +++ b/modulefiles/tasks/hercules/point_source.local.lua @@ -1 +1 @@ -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/hercules/python_srw.lua b/modulefiles/tasks/hercules/python_srw.lua deleted file mode 100644 index fe6c73a7d5..0000000000 --- a/modulefiles/tasks/hercules/python_srw.lua +++ /dev/null @@ -1,3 +0,0 @@ -unload("python") -load("conda") -setenv("SRW_ENV", "srw_app") diff --git a/modulefiles/tasks/jet/plot_allvars.local.lua b/modulefiles/tasks/jet/plot_allvars.local.lua index b7e9528710..85291013c7 100644 --- a/modulefiles/tasks/jet/plot_allvars.local.lua +++ b/modulefiles/tasks/jet/plot_allvars.local.lua @@ -1,2 +1,2 @@ load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/noaacloud/plot_allvars.local.lua b/modulefiles/tasks/noaacloud/plot_allvars.local.lua index cc122f69b2..85291013c7 100644 --- a/modulefiles/tasks/noaacloud/plot_allvars.local.lua +++ b/modulefiles/tasks/noaacloud/plot_allvars.local.lua @@ -1,5 +1,2 @@ -unload("python") -append_path("MODULEPATH","/contrib/EPIC/miniconda3/modulefiles") -load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) - -setenv("SRW_ENV", "regional_workflow") +load("conda") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git 
a/modulefiles/tasks/noaacloud/python_srw.lua b/modulefiles/tasks/noaacloud/python_srw.lua index a2dd45084c..e6e4268c35 100644 --- a/modulefiles/tasks/noaacloud/python_srw.lua +++ b/modulefiles/tasks/noaacloud/python_srw.lua @@ -1,2 +1,7 @@ load("conda") setenv("SRW_ENV", "srw_app") + +-- Add missing libstdc binary for Azure +if os.getenv("PW_CSP") == "azure" then + setenv("LD_PRELOAD","/opt/nvidia/nsight-systems/2023.1.2/host-linux-x64/libstdc++.so.6") +end diff --git a/modulefiles/tasks/noaacloud/run_vx.local.lua b/modulefiles/tasks/noaacloud/run_vx.local.lua index 737fc4f7cc..67b1b98ad6 100644 --- a/modulefiles/tasks/noaacloud/run_vx.local.lua +++ b/modulefiles/tasks/noaacloud/run_vx.local.lua @@ -25,3 +25,8 @@ end load("ufs-pyenv") load("conda") setenv("SRW_ENV", "srw_app") + +-- Add missing libstdc binary for Azure +if os.getenv("PW_CSP") == "azure" then + setenv("LD_PRELOAD","/opt/nvidia/nsight-systems/2023.1.2/host-linux-x64/libstdc++.so.6") +end diff --git a/modulefiles/tasks/orion/aqm_ics.local.lua b/modulefiles/tasks/orion/aqm_ics.local.lua index c7ac9dcb90..df0e35d5da 100644 --- a/modulefiles/tasks/orion/aqm_ics.local.lua +++ b/modulefiles/tasks/orion/aqm_ics.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/aqm_lbcs.local.lua b/modulefiles/tasks/orion/aqm_lbcs.local.lua index 5a7b0cece6..df0e35d5da 100644 --- a/modulefiles/tasks/orion/aqm_lbcs.local.lua +++ b/modulefiles/tasks/orion/aqm_lbcs.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("miniconda_regional_workflow_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/fire_emission.local.lua b/modulefiles/tasks/orion/fire_emission.local.lua index c7ac9dcb90..df0e35d5da 100644 --- a/modulefiles/tasks/orion/fire_emission.local.lua +++ b/modulefiles/tasks/orion/fire_emission.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/nexus_emission.local.lua b/modulefiles/tasks/orion/nexus_emission.local.lua index c7ac9dcb90..df0e35d5da 100644 --- a/modulefiles/tasks/orion/nexus_emission.local.lua +++ b/modulefiles/tasks/orion/nexus_emission.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/nexus_post_split.local.lua b/modulefiles/tasks/orion/nexus_post_split.local.lua index c7ac9dcb90..df0e35d5da 100644 --- a/modulefiles/tasks/orion/nexus_post_split.local.lua +++ b/modulefiles/tasks/orion/nexus_post_split.local.lua @@ -1,2 +1 @@ -load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3")) -load("python_srw_cmaq") +load("python_srw_aqm") diff --git a/modulefiles/tasks/orion/plot_allvars.local.lua b/modulefiles/tasks/orion/plot_allvars.local.lua index b49b8bb863..7cee04231e 100644 --- a/modulefiles/tasks/orion/plot_allvars.local.lua +++ b/modulefiles/tasks/orion/plot_allvars.local.lua @@ -1,3 +1,3 @@ unload("python") load("conda") -setenv("SRW_ENV", "srw_graphics") +setenv("SRW_GRAPHICS_ENV", "srw_graphics") diff --git a/modulefiles/tasks/orion/point_source.local.lua b/modulefiles/tasks/orion/point_source.local.lua index 89feda226c..df0e35d5da 100644 --- a/modulefiles/tasks/orion/point_source.local.lua +++ b/modulefiles/tasks/orion/point_source.local.lua @@ -1 +1 @@ -load("python_srw_cmaq") +load("python_srw_aqm") diff --git 
a/modulefiles/tasks/orion/python_srw.lua b/modulefiles/tasks/orion/python_srw.lua deleted file mode 100644 index fe6c73a7d5..0000000000 --- a/modulefiles/tasks/orion/python_srw.lua +++ /dev/null @@ -1,3 +0,0 @@ -unload("python") -load("conda") -setenv("SRW_ENV", "srw_app") diff --git a/modulefiles/tasks/orion/run_vx.local.lua b/modulefiles/tasks/orion/run_vx.local.lua index 1fa9617365..737fc4f7cc 100644 --- a/modulefiles/tasks/orion/run_vx.local.lua +++ b/modulefiles/tasks/orion/run_vx.local.lua @@ -1,8 +1,6 @@ --[[ Compiler-specific modules are used for met and metplus libraries --]] -load("build_orion_intel") - local met_ver = (os.getenv("met_ver") or "11.1.0") local metplus_ver = (os.getenv("metplus_ver") or "5.1.0") if (mode() == "load") then @@ -20,11 +18,10 @@ setenv("METPLUS_VERSION", metplus_ver) setenv("METPLUS_ROOT", base_metplus) setenv("METPLUS_PATH", base_metplus) - if (mode() == "unload") then unload(pathJoin("met", met_ver)) unload(pathJoin("metplus",metplus_ver)) end ---load("ufs-pyenv") -load("stack-python/3.10.8") -load("python_srw") +load("ufs-pyenv") +load("conda") +setenv("SRW_ENV", "srw_app") diff --git a/modulefiles/wflow_derecho.lua b/modulefiles/wflow_derecho.lua index d9a3e24e2f..28bc7ec2f6 100644 --- a/modulefiles/wflow_derecho.lua +++ b/modulefiles/wflow_derecho.lua @@ -5,8 +5,6 @@ on the CISL machine Derecho (Cray) whatis([===[Loads libraries for running the UFS SRW Workflow on Derecho ]===]) -load("ncarenv") - append_path("MODULEPATH","/glade/work/epicufsrt/contrib/derecho/rocoto/modulefiles") load("rocoto") diff --git a/modulefiles/wflow_gaea-c5.lua b/modulefiles/wflow_gaea.lua similarity index 68% rename from modulefiles/wflow_gaea-c5.lua rename to modulefiles/wflow_gaea.lua index 3073aa0522..6c24672c30 100644 --- a/modulefiles/wflow_gaea-c5.lua +++ b/modulefiles/wflow_gaea.lua @@ -6,16 +6,15 @@ the NOAA RDHPC machine Gaea C5 whatis([===[Loads libraries needed for running the UFS SRW App on gaea ]===]) unload("python") -load("set_pythonpath") -prepend_path("MODULEPATH","/ncrc/proj/epic/miniconda3/modulefiles/") -load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0")) prepend_path("MODULEPATH","/ncrc/proj/epic/rocoto/modulefiles/") load("rocoto") +load("conda") pushenv("MKLROOT", "/opt/intel/oneapi/mkl/2023.1.0/") +setenv("LD_PRELOAD", "/opt/cray/pe/gcc/12.2.0/snos/lib64/libstdc++.so.6") if mode() == "load" then LmodMsgRaw([===[Please do the following to activate conda: - > conda activate workflow_tools + > conda activate srw_app ]===]) end diff --git a/modulefiles/wflow_noaacloud.lua b/modulefiles/wflow_noaacloud.lua index ebf907545b..5e0c0ca50a 100644 --- a/modulefiles/wflow_noaacloud.lua +++ b/modulefiles/wflow_noaacloud.lua @@ -8,15 +8,15 @@ whatis([===[Loads libraries needed for running the UFS SRW App on NOAA cloud ]== prepend_path("MODULEPATH","/apps/modules/modulefiles") load("rocoto") - - load("conda") -setenv("PROJ_LIB","/contrib/EPIC/miniconda3/4.12.0/envs/regional_workflow/share/proj") -setenv("OPT","/contrib/EPIC/hpc-modules") -append_path("PATH","/contrib/EPIC/miniconda3/4.12.0/envs/regional_workflow/bin") prepend_path("PATH","/contrib/EPIC/bin") +-- Add missing libstdc binary for Azure +if os.getenv("PW_CSP") == "azure" then + setenv("LD_PRELOAD","/opt/nvidia/nsight-systems/2023.1.2/host-linux-x64/libstdc++.so.6") +end + if mode() == "load" then LmodMsgRaw([===[Please do the following to activate conda: > conda activate srw_app diff --git a/modulefiles/wflow_orion.lua b/modulefiles/wflow_orion.lua index 711991bb09..8bbc5663da 
100644 --- a/modulefiles/wflow_orion.lua +++ b/modulefiles/wflow_orion.lua @@ -6,9 +6,8 @@ the MSU machine Orion whatis([===[Loads libraries needed for running SRW on Orion ]===]) load("contrib") -load("rocoto") -load("wget") - +load("ruby/3.2.3") +load("rocoto/1.3.7") unload("python") load("conda") diff --git a/parm/FV3.input.yml b/parm/FV3.input.yml index e2df4a2a9e..efb6c85f5b 100644 --- a/parm/FV3.input.yml +++ b/parm/FV3.input.yml @@ -4,12 +4,7 @@ # parm/input.nml.FV3 # # to obtain the namelist for each physics suite that the SRW App can -# run with. To build a namelist for one of these configurations, use -# the Python helper script -# -# ush/set_namelist.py -# -# and provide this file and the desired section via the -c option. +# run with. FV3_RRFS_v1beta: @@ -83,11 +78,11 @@ FV3_HRRR: <<: *RRFS_v1beta_phys cdmbgwd: [3.5, 1.0] do_mynnsfclay: True - do_sfcperts: !!python/none + do_sfcperts: null gwd_opt: 3 do_gsl_drag_ss: True do_gsl_drag_tofd: True - do_gsl_drag_ls_bl: True + do_gsl_drag_ls_bl: True iaer: 5111 icliq_sw: 2 iovr: 3 @@ -102,8 +97,8 @@ FV3_HRRR: mosaic_lu: 0 mosaic_soil: 0 thsfc_loc: False - nst_anl: - nstf_name: + nst_anl: null + nstf_name: null FV3_RAP: fv_core_nml: @@ -112,7 +107,7 @@ FV3_RAP: <<: *RRFS_v1beta_phys cdmbgwd: [3.5, 1.0] do_mynnsfclay: True - do_sfcperts: !!python/none + do_sfcperts: null gwd_opt: 3 do_gsl_drag_ss: True do_gsl_drag_tofd: True @@ -140,40 +135,40 @@ FV3_GFS_2017_gfdlmp: k_split: 6 n_split: 6 nord: 2 - nord_zs_filter: !!python/none + nord_zs_filter: null range_warn: False vtdm4: 0.075 gfs_physics_nml: &gfs_2017_gfdlmp_phys avg_max_length: 3600.0 - bl_mynn_tkeadvect: !!python/none - bl_mynn_edmf: !!python/none - bl_mynn_edmf_mom: !!python/none + bl_mynn_tkeadvect: null + bl_mynn_edmf: null + bl_mynn_edmf_mom: null cdmbgwd: [3.5, 0.01] - cplflx: !!python/none + cplflx: null do_deep: False - do_mynnedmf: !!python/none - do_mynnsfclay: !!python/none + do_mynnedmf: null + do_mynnsfclay: null fhcyc: 0.0 fhlwr: 3600.0 fhswr: 3600.0 hybedmf: True - icloud_bl: !!python/none + icloud_bl: null imfdeepcnv: 2 imfshalcnv: 2 imp_physics: 11 lgfdlmprad: True - lheatstrg: !!python/none - lndp_type: !!python/none - lsm: !!python/none - lsoil: !!python/none - lsoil_lsm: !!python/none - ltaerosol: !!python/none - n_var_lndp: !!python/none + lheatstrg: null + lndp_type: null + lsm: null + lsoil: null + lsoil_lsm: null + ltaerosol: null + n_var_lndp: null oz_phys: True oz_phys_2015: False - satmedmf: !!python/none + satmedmf: null shal_cnv: True - ttendlim: !!python/none + ttendlim: null gfdl_cloud_microphysics_nml: &gfs_gfdl_cloud_mp c_cracw: 0.8 c_paut: 0.5 @@ -272,7 +267,7 @@ FV3_GFS_v15p2: kord_wz: 9 n_split: 8 n_sponge: 30 - nord_zs_filter: !!python/none + nord_zs_filter: null nudge_qv: True range_warn: False rf_cutoff: 750.0 @@ -283,16 +278,16 @@ FV3_GFS_v15p2: tau_l2v: 225.0 tau_v2l: 150.0 gfs_physics_nml: &gfs_v15_gfs_physics - bl_mynn_edmf: !!python/none - bl_mynn_edmf_mom: !!python/none - bl_mynn_tkeadvect: !!python/none + bl_mynn_edmf: null + bl_mynn_edmf_mom: null + bl_mynn_tkeadvect: null cnvcld: True cnvgwd: True - cplflx: !!python/none + cplflx: null do_myjpbl: False do_myjsfc: False - do_mynnedmf: !!python/none - do_mynnsfclay: !!python/none + do_mynnedmf: null + do_mynnsfclay: null do_tofd: False do_ugwp: False do_ysu: False @@ -300,12 +295,12 @@ FV3_GFS_v15p2: fhlwr: 3600.0 fhswr: 3600.0 hybedmf: True - iau_delthrs: !!python/none - iaufhrs: !!python/none + iau_delthrs: null + iaufhrs: null imfdeepcnv: 2 imfshalcnv: 2 imp_physics: 11 - 
icloud_bl: !!python/none + icloud_bl: null iopt_alb: 2 iopt_btr: 1 iopt_crs: 1 @@ -321,28 +316,28 @@ FV3_GFS_v15p2: iopt_trs: 2 ldiag_ugwp: False lgfdlmprad: True - lradar: !!python/none + lradar: null lsm: 1 - lsoil: !!python/none - lsoil_lsm: !!python/none - ltaerosol: !!python/none + lsoil: null + lsoil_lsm: null + ltaerosol: null shal_cnv: True shinhong: False - ttendlim: !!python/none + ttendlim: null xkzm_h: 1.0 xkzm_m: 1.0 xkzminv: 0.3 namsfc: landice: True ldebug: False - surf_map_nml: + surf_map_nml: null FV3_GFS_v15_thompson_mynn_lam3km: atmos_model_nml: avg_max_length: 3600.0 fv_core_nml: agrid_vel_rst: True - full_zs_filter: !!python/none + full_zs_filter: null n_sponge: 9 npz_type: '' rf_fast: False @@ -382,20 +377,20 @@ FV3_GFS_v15_thompson_mynn_lam3km: iopt_snf: 4 iopt_stc: 1 iopt_tbot: 2 - iopt_trs: !!python/none + iopt_trs: null iovr: 3 ldiag_ugwp: False lgfdlmprad: False lsm: 1 - lsoil: !!python/none - lsoil_lsm: !!python/none + lsoil: null + lsoil_lsm: null ltaerosol: False print_diff_pgr: True - sfclay_compute_flux: !!python/none + sfclay_compute_flux: null xkzminv: 0.3 xkzm_m: 1.0 xkzm_h: 1.0 - surf_map_nml: !!python/none + surf_map_nml: null FV3_GFS_v16: cires_ugwp_nml: @@ -419,7 +414,7 @@ FV3_GFS_v16: na_init: 0 nudge_dz: False res_latlon_dynamics: '' - rf_fast: !!python/none + rf_fast: null tau: 10.0 gfdl_cloud_microphysics_nml: <<: *gfs_gfdl_cloud_mp @@ -431,10 +426,10 @@ FV3_GFS_v16: gfs_physics_nml: <<: *gfs_v15_gfs_physics cdmbgwd: [4.0, 0.15, 1.0, 1.0] - do_myjpbl: !!python/none - do_myjsfc: !!python/none + do_myjpbl: null + do_myjsfc: null do_tofd: True - do_ysu: !!python/none + do_ysu: null hybedmf: False iaer: 5111 icliq_sw: 2 @@ -443,143 +438,109 @@ FV3_GFS_v16: isatmedmf: 1 lgfdlmprad: True lheatstrg: True - lndp_type: !!python/none + lndp_type: null lsoil: 4 - n_var_lndp: !!python/none + n_var_lndp: null prautco: [0.00015, 0.00015] psautco: [0.0008, 0.0005] satmedmf: True - shinhong: !!python/none - xkzminv: !!python/none - xkzm_m: !!python/none - xkzm_h: !!python/none + shinhong: null + xkzminv: null + xkzm_m: null + xkzm_h: null mpp_io_nml: deflate_level: 1 shuffle: 1 namsfc: landice: True ldebug: False - surf_map_nml: !!python/none + surf_map_nml: null FV3_GFS_v17_p8: cires_ugwp_nml: launch_level: 27 fv_core_nml: <<: *gfs_v15_fv_core - agrid_vel_rst: True + agrid_vel_rst: False d2_bg_k1: 0.2 - d2_bg_k2: 0.04 - delt_max: 0.002 + d2_bg_k2: 0.0 dnats: 0 do_sat_adj: False - do_vort_damp: !!python/none - full_zs_filter: !!python/none fv_sg_adj: 450 hord_dp: -5 hord_mt: 5 hord_tm: 5 - hord_vt: 5 hord_tr: 8 + hord_vt: 5 k_split: 6 + make_nh: True n_split: 6 n_sponge: 10 - nord: 2 + na_init: 1 + nord: 1 nudge_dz: False - n_zs_filter: !!python/none - range_warn: True res_latlon_dynamics: '' - rf_fast: !!python/none + rf_fast: null tau: 10.0 - gfdl_cloud_microphysics_nml: - <<: *gfs_gfdl_cloud_mp - mp_time: 150.0 - reiflag: 2 - rthresh: 1.0e-06 - sedi_transport: True - tau_l2v: 225.0 - tau_v2l: 150.0 gfs_physics_nml: - <<: *gfs_v15_gfs_physics - active_gases: h2o_co2_o3_n2o_ch4_o2 - bl_mynn_edmf: 1 - bl_mynn_edmf_mom: 1 - bl_mynn_tkeadvect: True - cdmbgwd: [4.0, 0.15, 1.0, 1.0] - cplchm: False + cdmbgwd: [4.0, 0.05, 1.0, 1.0] + cnvcld: True + cnvgwd: True decfl: 10 + do_deep: True do_gsl_drag_ls_bl: False do_gsl_drag_ss: True do_gsl_drag_tofd: False - do_myjpbl: !!python/none - do_myjsfc: !!python/none do_mynnedmf: False do_mynnsfclay: False - do_RRTMGP: False + do_tofd: False + do_ugwp: False do_ugwp_v0: True - do_ugwp_v0_nst_only: False do_ugwp_v0_orog_only: 
False + do_ugwp_v0_nst_only: False do_ugwp_v1: False do_ugwp_v1_orog_only: False - do_ysu: !!python/none - doGP_cldoptics_LUT: False - doGP_lwscat: False - dt_inner: 150 - frac_grid: True + dt_inner: 150.0 + fhlwr: 1200.0 + fhswr: 1200.0 + frac_grid: False gwd_opt: 2 - hybedmf: False iaer: 1011 ialb: 2 - iau_inc_files: !!python/none - icloud_bl: 1 icliq_sw: 2 iems: 2 - imp_physics: 8 + imfdeepcnv: 2 + imfshalcnv: 2 iopt_alb: 1 + iopt_btr: 1 iopt_crs: 2 iopt_dveg: 4 + iopt_frz: 1 + iopt_inf: 1 iopt_rad: 3 + iopt_run: 1 iopt_sfc: 3 + iopt_snf: 4 iopt_stc: 3 + iopt_tbot: 2 iovr: 3 isatmedmf: 1 - ldiag_ugwp: !!python/none + ldiag_ugwp: False + lseaspray: True lgfdlmprad: False + lheatstrg: False lradar: False - lseaspray: True lsm: 2 - lsoil: 4 lsoil_lsm: 4 ltaerosol: False - lw_file_clouds: rrtmgp-cloud-optics-coeffs-lw.nc - lw_file_gas: rrtmgp-data-lw-g128-210809.nc min_lakeice: 0.15 min_seaice: 0.15 - nsfullradar_diag: !!python/none - prautco: [0.00015, 0.00015] - psautco: [0.0008, 0.0005] qdiag3d: False ras: False - rrtmgp_nBandsLW: 16 - rrtmgp_nBandsSW: 14 - rrtmgp_nGptsLW: 128 - rrtmgp_nGptsSW: 112 satmedmf: True sedi_semi: True - sfclay_compute_flux: !!python/none - shinhong: !!python/none - sw_file_clouds: rrtmgp-cloud-optics-coeffs-sw.nc - sw_file_gas: rrtmgp-data-sw-g112-210809.nc - ttendlim: -999 - xkzminv: !!python/none - xkzm_m: !!python/none - xkzm_h: !!python/none + shal_cnv: True mpp_io_nml: deflate_level: 1 shuffle: 1 - namsfc: - fsicl: 0 - fsics: 0 - landice: False - ldebug: False - surf_map_nml: !!python/none - + surf_map_nml: null diff --git a/parm/aqm.rc b/parm/aqm.rc index 3d2ad32711..4ffaf5095e 100644 --- a/parm/aqm.rc +++ b/parm/aqm.rc @@ -7,14 +7,14 @@ # # General settings # -ae_matrix_nml: {{ aqm_config_dir }}/AE_cb6r3_ae6_aq.nml -gc_matrix_nml: {{ aqm_config_dir }}/GC_cb6r3_ae6_aq.nml -nr_matrix_nml: {{ aqm_config_dir }}/NR_cb6r3_ae6_aq.nml -tr_matrix_nml: {{ aqm_config_dir }}/Species_Table_TR_0.nml +ae_matrix_nml: {{ fixaqm }}/epa/AE_cb6r3_ae6_aq.nml +gc_matrix_nml: {{ fixaqm }}/epa/GC_cb6r3_ae6_aq.nml +nr_matrix_nml: {{ fixaqm }}/epa/NR_cb6r3_ae6_aq.nml +tr_matrix_nml: {{ fixaqm }}/epa/Species_Table_TR_0.nml -csqy_data: {{ aqm_config_dir }}/CSQY_DATA_cb6r3_ae6_aq -optics_data: {{ aqm_config_dir }}/PHOT_OPTICS.dat -omi_data: {{ aqm_config_dir }}/omi_cmaq_2015_361X179.dat +csqy_data: {{ fixaqm }}/epa/CSQY_DATA_cb6r3_ae6_aq +optics_data: {{ fixaqm }}/epa/PHOT_OPTICS.dat +omi_data: {{ fixaqm }}/epa/omi_cmaq_2015_361X179.dat init_concentrations: {{ init_concentrations | lower }} @@ -172,7 +172,7 @@ bio_format: netcdf bio_file: {{ aqm_rc_bio_file_fp }} bio_frequency: static bio_period: summer -bio_speciation_file: {{ dcominbio }}/gspro_biogenics_1mar2017.txt +bio_speciation_file: {{ fixaqm }}/bio/gspro_biogenics_1mar2017.txt bio_speciation_profile: B10C6 bio_species:: AVG_NOAG_GROW 1.00000 AVG_NOAG_GROW gmN/hr diff --git a/parm/data_locations.yml b/parm/data_locations.yml index 7901f4c085..e65a796739 100644 --- a/parm/data_locations.yml +++ b/parm/data_locations.yml @@ -236,6 +236,20 @@ RAP: file_names: <<: *rap_file_names +RRFS: + hpss: + protocol: htar + file_names: &rrfs_file_names + anl: + - rrfs.t{hh}z.prslev.f{fcst_hr:03d}.conus.grib2 + fcst: + - rrfs.t{hh}z.prslev.f{fcst_hr:03d}.conus.grib2 + aws: + protocol: download + url: https://noaa-rrfs-pds.s3.amazonaws.com/rrfs_a/rrfs_a.{yyyymmdd}/{hh}/control/ + file_names: + <<: *rrfs_file_names + HRRR: hpss: protocol: htar diff --git a/parm/fixed_files_mapping.yaml b/parm/fixed_files_mapping.yaml index 
90fd1870a4..49d3191de5 100644 --- a/parm/fixed_files_mapping.yaml +++ b/parm/fixed_files_mapping.yaml @@ -139,35 +139,6 @@ fixed_files: !join_str ["FNSMCC | ",*FNSMCC], !join_str ["FNMSKH | ",*FNMSKH] ] - #"FNZORC | $FNZORC", - - # - #----------------------------------------------------------------------- - # - # FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING: - # This array is used to set some of the namelist variables in the forecast - # model's namelist file that represent the relative or absolute paths of - # various fixed files (the first column of the array, where columns are - # delineated by the pipe symbol "|") to the full paths to surface climatology - # files (on the native FV3-LAM grid) in the FIXlam directory derived from - # the corresponding surface climatology fields (the second column of the - # array). - # - #----------------------------------------------------------------------- - # - FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING: [ - "FNALBC | snowfree_albedo", - "FNALBC2 | facsf", - "FNTG3C | substrate_temperature", - "FNVEGC | vegetation_greenness", - "FNVETC | vegetation_type", - "FNSOTC | soil_type", - "FNVMNC | vegetation_greenness", - "FNVMXC | vegetation_greenness", - "FNSLPC | slope_type", - "FNABSC | maximum_snow_albedo" - ] - # #----------------------------------------------------------------------- @@ -204,7 +175,7 @@ fixed_files: "global_tg3clim.2.6x1.5.grb | global_tg3clim.2.6x1.5.grb", "sfc_emissivity_idx.txt | global_sfc_emissivity_idx.txt", "solarconstant_noaa_an.txt | global_solarconstant_noaa_an.txt", - "global_o3prdlos.f77 | " + "global_o3prdlos.f77 | ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77" ] # diff --git a/parm/metplus/EnsembleStat.conf b/parm/metplus/EnsembleStat.conf new file mode 100644 index 0000000000..2caeda1521 --- /dev/null +++ b/parm/metplus/EnsembleStat.conf @@ -0,0 +1,742 @@ +# {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{fhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Specify the name of the METplus log file. 
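+# The value of metplus_log_fn below is a Jinja template variable that the
+# SRW workflow fills in when this configuration file is rendered (as are
+# the other templated values in this file).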
+# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. +# +{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped +# +# Name to identify model (forecast) data in output. +# +MODEL = {{vx_fcst_model_name}} + +{{METPLUS_TOOL_NAME}}_DESC = NA +# +# Name to identify observation data in output. +# +OBTYPE = {{obtype}} +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. +# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Observation data time window(s). +# +{%- if input_field_group in ['APCP', 'ASNOW'] %} +OBS_FILE_WINDOW_BEGIN = 0 +OBS_FILE_WINDOW_END = 0 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = 0 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = 0 +{%- elif input_field_group in ['REFC', 'RETOP'] %} +OBS_FILE_WINDOW_BEGIN = -300 +OBS_FILE_WINDOW_END = 300 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = 0 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = 0 +{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %} +OBS_WINDOW_BEGIN = -1799 +OBS_WINDOW_END = 1800 +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = {OBS_WINDOW_END} +{%- endif %} + +# number of expected members for ensemble. Should correspond with the +# number of items in the list for FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE +{{METPLUS_TOOL_NAME}}_N_MEMBERS = {{num_ens_members}} + +# ens.ens_thresh value in the MET config file +# threshold for ratio of valid files to expected files to allow app to run +{{METPLUS_TOOL_NAME}}_ENS_THRESH = 0.05 + +# ens.vld_thresh value in the MET config file +{{METPLUS_TOOL_NAME}}_ENS_VLD_THRESH = 1.0 + +{%- if input_field_group in ['ADPSFC', 'ADPUPA'] %} + +{{METPLUS_TOOL_NAME}}_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA +#{{METPLUS_TOOL_NAME}}_OBS_QUALITY_EXC = +{%- endif %} + +# {{METPLUS_TOOL_NAME}}_MET_OBS_ERR_TABLE is not required. +# If the variable is not defined, or the value is not set, then the MET +# default is used. 
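+# In this template, the Jinja branch below points the accumulation
+# (APCP/ASNOW) and point-obs (ADPSFC/ADPUPA) field groups at MET's bundled
+# obs_error_table.txt and leaves the entry empty for the gridded
+# REFC/RETOP groups, so those fall back to MET's default behavior.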
+{%- if input_field_group in ['APCP', 'ASNOW'] %} +{{METPLUS_TOOL_NAME}}_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt +{%- elif input_field_group in ['REFC', 'RETOP'] %} +{{METPLUS_TOOL_NAME}}_MET_OBS_ERR_TABLE = +{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %} +{{METPLUS_TOOL_NAME}}_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt +{%- endif %} + + +# Used in the MET config file for: regrid to_grid field +{%- set comment_or_null = '' %} +{%- set regrid_to_grid = '' %} +{%- set regrid_method = '' %} +{%- if input_field_group in ['APCP', 'ASNOW'] %} + {%- set comment_or_null = '' %} + {%- set regrid_to_grid = 'FCST' %} + {%- set regrid_method = 'BUDGET' %} +{%- elif input_field_group in ['REFC', 'RETOP'] %} + {%- set comment_or_null = '' %} + {%- set regrid_to_grid = 'FCST' %} + {%- set regrid_method = 'BUDGET' %} +{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %} + {%- set comment_or_null = '#' %} + {%- set regrid_to_grid = 'NONE' %} + {%- set regrid_method = 'BILIN' %} +{%- endif %} +{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = {{regrid_to_grid}} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_REGRID_METHOD = {{regrid_method}} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2 +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5 +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE + +{{METPLUS_TOOL_NAME}}_CENSOR_THRESH = +{{METPLUS_TOOL_NAME}}_CENSOR_VAL = +{% if input_field_group in ['APCP', 'ASNOW'] %} +{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = +{{METPLUS_TOOL_NAME}}_DUPLICATE_FLAG = UNIQUE +{{METPLUS_TOOL_NAME}}_SKIP_CONST = TRUE +{{METPLUS_TOOL_NAME}}_OBS_ERROR_FLAG = TRUE +{%- elif input_field_group in ['REFC', 'RETOP'] %} +# Should this parameter be set to something other than ADPSFC (maybe +# just leave empty) since we are not verifying surface fields? 
+{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = ADPSFC +{{METPLUS_TOOL_NAME}}_DUPLICATE_FLAG = NONE +{{METPLUS_TOOL_NAME}}_SKIP_CONST = TRUE +{{METPLUS_TOOL_NAME}}_OBS_ERROR_FLAG = FALSE +{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %} +{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = {{input_field_group}} +{{METPLUS_TOOL_NAME}}_DUPLICATE_FLAG = NONE +{{METPLUS_TOOL_NAME}}_SKIP_CONST = FALSE +{{METPLUS_TOOL_NAME}}_OBS_ERROR_FLAG = FALSE +{%- endif %} + +{{METPLUS_TOOL_NAME}}_ENS_SSVAR_BIN_SIZE = 1.0 +{{METPLUS_TOOL_NAME}}_ENS_PHIST_BIN_SIZE = 0.05 + +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_DAY_INTERVAL = 31 +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_HOUR_INTERVAL = 6 + +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_DAY_INTERVAL = 31 +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_HOUR_INTERVAL = 6 + +{% set comment_or_null = '' %} +{%- if input_field_group in ['APCP', 'ASNOW'] %} + {%- set comment_or_null = '' %} +{%- elif input_field_group in ['REFC', 'RETOP'] %} + {%- set comment_or_null = '' %} +{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %} + {%- set comment_or_null = '#' %} +{%- endif %} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = False +{% if input_field_group in ['APCP', 'ASNOW'] %} +{{METPLUS_TOOL_NAME}}_MASK_GRID = +{%- elif input_field_group in ['REFC', 'RETOP'] %} +{{METPLUS_TOOL_NAME}}_MASK_GRID = FULL +{%- elif input_field_group in ['ADPSFC', 'ADPUPA'] %} +{{METPLUS_TOOL_NAME}}_MASK_GRID = +{%- endif %} + +{{METPLUS_TOOL_NAME}}_CI_ALPHA = 0.05 + +{{METPLUS_TOOL_NAME}}_INTERP_FIELD = BOTH +{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0 +{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE +{{METPLUS_TOOL_NAME}}_INTERP_METHOD = NEAREST +{{METPLUS_TOOL_NAME}}_INTERP_WIDTH = 1 + +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECNT = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RPS = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RHIST = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PHIST = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ORANK = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SSVAR = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RELP = STAT + +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_RANK = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_WEIGHT = FALSE +# +# Forecast and observation variables and levels as specified in the fcst +# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, +# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, +# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. +# +{#- +Import the file containing jinja macros. 
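+(The imported metplus_macros module provides the helper macros used
+throughout this template, e.g. check_field_group(),
+get_accumulation_no_zero_pad(), set_delim_str(), and print_err_and_quit().)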
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
+#}
+{%- set indx_level_fcst = '' %}
+{%- set indx_input_thresh_fcst = '' %}
+{%- set error_msg = '' %}
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+
+{%- set threshes_fcst = [] %}
+{%- set threshes_obs = [] %}
+
+{#-
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
+#}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
+
+{#-
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group. Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
+#}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}
+
+{#-
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in the
+METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %}
+
+  {%- if delim_str in field_cpld %}
+    {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
+  {%- else %}
+    {%- set field_fcst = field_cpld %}
+    {%- set field_obs = field_cpld %}
+  {%- endif %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field.
+#}
+  {%- set valid_levels_fcst = [] %}
+  {%- set valid_levels_obs = [] %}
+  {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+    {%- if delim_str in level_cpld %}
+      {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+    {%- else %}
+      {%- set level_fcst = level_cpld %}
+      {%- set level_obs = level_cpld %}
+    {%- endif %}
+    {%- set tmp = valid_levels_fcst.append(level_fcst) %}
+    {%- set tmp = valid_levels_obs.append(level_obs) %}
+  {%- endfor %}
+
+{#-
+Make sure that the input forecast level (input_level_fcst) is set to a
+valid value.
+#}
+  {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %}
+    {%- set error_msg = '\n' ~
+'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~
+'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~
+'for the current forecast field (field_fcst). This is not the case:\n' ~
+'  field_fcst = ' ~ field_fcst ~ '\n' ~
+'  valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~
+'  input_level_fcst = ' ~ input_level_fcst ~ '\n' %}
+    {{metplus_macros.print_err_and_quit(error_msg)}}
+  {%- endif %}
+
+{#-
+Loop over the (coupled) levels and corresponding lists of thresholds.
+Extract from these the level values for forecasts and observations and +use them to set the forecast and observation field names, levels, +thresholds, and/or options in the METplus configuration file. +#} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + + {%- set valid_threshes_fcst = [] %} + {%- set valid_threshes_obs = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- set thresh_obs = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- set tmp = valid_threshes_obs.append(thresh_obs) %} + {%- endfor %} + + {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} +{#- +Increment the METplus variable counter. +#} + {%- set ns.var_count = ns.var_count+1 %} + +{#- +Set forecast field name. Note that this has to exactly match the name +of the field in the input forecast file. + +For accumulated fields, the input forecast file is generated by MET's +PcpCombine tool. In that file, the field name consists of the forecast +field name here (field_fcst) with the accumulation period appended to +it (separated by an underscore), so we must do the same here to get an +exact match. +#} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}} + {%- else %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}} + {%- endif %} + +{#- +Set forecast field level. +#} +FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}} + +{#- +Set forecast field threshold(s). Note that no forecast thresholds are +included in the METplus configuration file if input_thresh_fcst is set +to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_thresh_fcst is set to 'all', set the list of forecast thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_fcst = valid_threshes_fcst %} +{#- +If input_thresh_fcst is set to a specific value: +* If that value is valid, i.e. it exists in the list of valid forecast + thresholds, get its index in that list and use it to set the forecast + threshold to a one-element list containing that value. Note that the + index will be needed later below when setting the observation threshold(s). +* If the input forecast threshold is not valid, print out a warning message + and exit. 
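+
+As an illustrative example (threshold values assumed here, not taken from
+an actual vx configuration): if valid_threshes_fcst = ['ge0.254', 'ge2.54']
+and input_thresh_fcst = 'ge2.54', then indx_input_thresh_fcst is set to 1
+and threshes_fcst becomes ['ge2.54']; further below, the same index is
+used to pick the matching observation threshold out of valid_threshes_obs.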
+#} + {%- else %} + + {%- if input_thresh_fcst in valid_threshes_fcst %} + {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} + {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} + {%- else %} + {%- set error_msg = '\n' ~ +'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~ +'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~ +'of valid forecast thresholds (valid_threshes_fcst):\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' level_fcst = ' ~ level_fcst ~ '\n' ~ +' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~ +' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + + {%- endif %} +{#- +If threshes_fcst has been reset to something other than its default +value of an empty list, then set the forecast thresholds in the METplus +configuration file because that implies threshes_fcst was set above to +a non-empty value. Then reset threshes_fcst to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_fcst != []) %} +FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}} + {%- endif %} + {%- set threshes_fcst = [] %} + + {%- endif %} + +{#- +Set forecast field options. +#} + {%- set opts_indent_len = 20 %} + {%- if (ns.var_count > 9) and (ns.var_count <= 99) %} + {%- set opts_indent_len = opts_indent_len + 1 %} + {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %} + {%- set opts_indent_len = opts_indent_len + 2 %} + {%- elif (ns.var_count > 999) %} + {%- set opts_indent_len = opts_indent_len + 3 %} + {%- endif %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'APCP' %} + + {%- if field_fcst == 'APCP' %} +FCST_VAR{{ns.var_count}}_OPTIONS = ens_ssvar_bin_size = 50.0; +{{opts_indent}}ens_phist_bin_size = 0.05; + {%- endif %} + + {%- elif input_field_group == 'ASNOW' %} + + {%- if field_fcst == 'ASNOW' %} +FCST_VAR{{ns.var_count}}_OPTIONS = ens_ssvar_bin_size = 50.0; +{{opts_indent}}ens_phist_bin_size = 0.05; + {%- endif %} + + {%- elif input_field_group == 'REFC' %} + + {%- if field_fcst == 'REFC' %} +FCST_VAR{{ns.var_count}}_OPTIONS = ens_ssvar_bin_size = 50.0; +{{opts_indent}}ens_phist_bin_size = 0.05; + {%- endif %} + + {%- elif input_field_group == 'RETOP' %} + + {%- if field_fcst == 'RETOP' %} +FCST_VAR{{ns.var_count}}_OPTIONS = convert(x) = x * 3.28084 * 0.001; ;; Convert from meters to kilofeet. +{{opts_indent}}ens_ssvar_bin_size = 50.0; +{{opts_indent}}ens_phist_bin_size = 0.05; + {%- endif %} + + {%- elif input_field_group == 'ADPSFC' %} + + {%- if field_fcst == 'HGT' %} +FCST_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215; +{{opts_indent}}desc = "CEILING"; + {%- elif field_fcst == 'TCDC' %} +FCST_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 200; +{{opts_indent}}GRIB2_ipdtmpl_index=[27]; +{{opts_indent}}GRIB2_ipdtmpl_val=[255]; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif field_fcst == 'VIS' %} +FCST_VAR{{ns.var_count}}_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif field_fcst == 'WIND' %} +FCST_VAR{{ns.var_count}}_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. + {%- endif %} + + {%- elif input_field_group == 'ADPUPA' %} + + {%- if field_fcst == 'CAPE' %} +FCST_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; + {%- endif %} + + {%- endif %} + +{#- +Set observation field name. 
Note that this has to exactly match the name +of the field in the input observation file. + +For accumulated fields, the input observation file is generated by MET's +PcpCombine tool. In that file, the field name consists of the observation +field name here (field_obs) with the accumulation period appended to it +(separated by an underscore), so we must do the same here to get an exact +match. + +Note: +Turns out for ASNOW, PcpCombine is not run for obs, so we exclude that +from the "if" clause here (so it goes into the "else"). For workflow +behavior uniformity between APCP and ASNOW, consider running PcpCombine +for ASNOW observations as well (just as it's run for APCP observations). + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +#} + {%- if (input_field_group in ['APCP']) %} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}_{{accum_hh}} + {%- else %} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}} + {%- endif %} + +{#- +Set observation field level. +#} + {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %} + {%- set level_obs = valid_levels_obs[indx_level_fcst] %} +OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}} + +{#- +Set observation field threshold(s). Note that no observation thresholds +are included in the METplus configuration file if input_thresh_fcst is +set to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_thresh_fcst is set to 'all', set the list of observation thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_obs = valid_threshes_obs %} +{#- +If input_thresh_fcst is set to a specific forecast threshold, then the +observation threshold is given by the element in the list of valid +observation thresholds that has the same index as that of input_thresh_fcst +in the list of valid forecast thresholds. +#} + {%- else %} + {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %} + {%- endif %} +{#- +If threshes_obs has been reset to something other than its default value +of an empty list, then set the observation thresholds in the METplus +configuration file because that implies threshes_obs was set above to +a non-empty value. Then reset threshes_obs to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_obs != []) %} +OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}} + {%- endif %} + {%- set threshes_obs = [] %} + + {%- endif %} + +{#- +Set observation field options. 
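+These largely mirror the corresponding forecast options set above; where
+observation units differ from forecast units, a convert(x) function is
+added (e.g. convert(x) = 100.0*x for ASNOW observations just below).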
+#}
+  {%- set opts_indent_len = opts_indent_len - 1 %}
+  {%- set opts_indent = ' '*opts_indent_len %}
+
+  {%- if input_field_group == 'APCP' %}
+
+    {%- if field_obs == 'APCP' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = {FCST_VAR{{ns.var_count}}_OPTIONS}
+    {%- endif %}
+
+  {%- elif input_field_group == 'ASNOW' %}
+
+    {%- if field_obs == 'ASNOW' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = {FCST_VAR{{ns.var_count}}_OPTIONS};
+{{opts_indent}}convert(x) = 100.0*x;
+    {%- endif %}
+
+  {%- elif input_field_group == 'REFC' %}
+
+    {%- if field_obs == 'MergedReflectivityQCComposite' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20;
+{{opts_indent}}censor_val = -20.0;
+{{opts_indent}}cnt_thresh = [ >15 ];
+{{opts_indent}}cnt_logic = UNION;
+{{opts_indent}}ens_ssvar_bin_size = 50.0;
+{{opts_indent}}ens_phist_bin_size = 0.05;
+    {%- endif %}
+
+  {%- elif input_field_group == 'RETOP' %}
+
+    {%- if field_obs == 'EchoTop18' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20;
+{{opts_indent}}censor_val = -20.0;
+{{opts_indent}}cnt_thresh = [ >15 ];
+{{opts_indent}}cnt_logic = UNION;
+{{opts_indent}}convert(x) = x * 3280.84 * 0.001; ;; Convert from kilometers to kilofeet.
+{{opts_indent}}ens_ssvar_bin_size = 50.0;
+{{opts_indent}}ens_phist_bin_size = 0.05;
+    {%- endif %}
+
+  {%- elif input_field_group == 'ADPSFC' %}
+
+    {%- if field_obs in ['DPT', 'TMP', 'WIND'] %}
+OBS_VAR{{ns.var_count}}_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
+    {%- elif field_obs == 'CEILING' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215
+    {%- endif %}
+
+  {%- elif input_field_group == 'ADPUPA' %}
+
+    {%- if field_obs in ['DPT', 'HGT', 'TMP', 'WIND'] %}
+OBS_VAR{{ns.var_count}}_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; }
+    {%- elif field_obs == 'CAPE' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
+{{opts_indent}}cnt_logic = UNION;
+    {%- elif field_obs == 'PBL' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE";
+    {%- endif %}
+
+  {%- endif %}
+
+{#-
+Print out a newline to separate the settings for the current field (both
+forecast and observation settings) from those for the next field.
+#}
+  {{- '\n' }}
+
+  {%- endif %}
+
+  {%- endfor %}
+{%- endfor %}
+[dir]
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+#
+# Point observation input directory for {{MetplusToolName}}.
+#
+{%- if input_field_group in ['ADPSFC', 'ADPUPA'] %}
+OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_DIR = {{obs_input_dir}}
+{%- else %}
+OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_DIR =
+{%- endif %}
+#
+# Grid observation input directory for {{MetplusToolName}}.
+#
+{%- if input_field_group in ['ADPSFC', 'ADPUPA'] %}
+OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_DIR =
+{%- else %}
+OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_DIR = {{obs_input_dir}}
+{%- endif %}
+#
+# Forecast model input directory for {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used
+# in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Output directory for {{MetplusToolName}}.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for point observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_DIR.
+#
+{%- if input_field_group in ['ADPSFC', 'ADPUPA'] %}
+OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_TEMPLATE = {{obs_input_fn_template}}
+{%- else %}
+OBS_{{METPLUS_TOOL_NAME}}_POINT_INPUT_TEMPLATE =
+{%- endif %}
+#
+# Template for gridded observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_DIR.
+#
+{%- if input_field_group in ['ADPSFC', 'ADPUPA'] %}
+OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_TEMPLATE =
+{%- else %}
+OBS_{{METPLUS_TOOL_NAME}}_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}}
+{%- endif %}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+# Note that this can be a comma-separated list of ensemble members
+# or a single entry; filename wildcard characters (? or *) may be used.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
+#
+# Variable used to specify one or more verification mask files for
+# {{MetplusToolName}}. Not used for this example.
+#
+{{METPLUS_TOOL_NAME}}_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/EnsembleStat_ADPSFC.conf b/parm/metplus/EnsembleStat_ADPSFC.conf
deleted file mode 100644
index 07238030c1..0000000000
--- a/parm/metplus/EnsembleStat_ADPSFC.conf
+++ /dev/null
@@ -1,307 +0,0 @@
-# EnsembleStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = EnsembleStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-#   INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-#   VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information
-# %Y%m%d%H expands to YYYYMMDDHH
-INIT_TIME_FMT = %Y%m%d%H
-
-# Start time for METplus run - must match INIT_TIME_FMT
-INIT_BEG = {{cdate}}
-
-# End time for METplus run - must match INIT_TIME_FMT
-INIT_END = {{cdate}}
-
-# Increment between METplus runs (in seconds if no units are specified).
-# Must be >= 60 seconds.
-INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_ENSEMBLE_STAT_WINDOW_END = {OBS_WINDOW_END} - -# number of expected members for ensemble. Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -ENSEMBLE_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#ENSEMBLE_STAT_OBS_QUALITY_EXC = - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. 
-ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = NONE -#ENSEMBLE_STAT_REGRID_METHOD = BILIN -#ENSEMBLE_STAT_REGRID_WIDTH = 2 -#ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -#ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -ENSEMBLE_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} -ENSEMBLE_STAT_DUPLICATE_FLAG = NONE -ENSEMBLE_STAT_SKIP_CONST = FALSE -ENSEMBLE_STAT_OBS_ERROR_FLAG = FALSE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -#ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -#ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. -# -FCST_VAR1_NAME = TMP -FCST_VAR1_LEVELS = Z2 -FCST_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = Z2 -OBS_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303 -OBS_VAR1_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR2_NAME = DPT -FCST_VAR2_LEVELS = Z2 -FCST_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298 -OBS_VAR2_NAME = DPT -OBS_VAR2_LEVELS = Z2 -OBS_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298 -OBS_VAR2_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR3_NAME = WIND -FCST_VAR3_LEVELS = Z10 -FCST_VAR3_THRESH = ge5, ge10, ge15 -FCST_VAR3_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. 
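The GRIB2_pdt = 0 option on FCST_VAR3 above selects the instantaneous GRIB2 product definition template so that, per the inline note, the 10-m wind speed is derived from the U and V components instead of being read from the post-processed maximum-wind record. The derivation itself is just the magnitude of the wind vector; a minimal Python sketch of the idea, with made-up component values (illustrative only, not MET source code):

import math

def wind_speed(u: float, v: float) -> float:
    """Instantaneous wind speed as the magnitude of the (U, V) wind vector."""
    return math.hypot(u, v)

# Hypothetical 10-m component values (m/s), checked against the ge5/ge10/ge15
# thresholds that the WIND entries above use at Z10.
u10, v10 = 3.2, 4.1
spd = wind_speed(u10, v10)                         # about 5.2 m/s
print({f"ge{t}": spd >= t for t in (5, 10, 15)})   # only ge5 is exceeded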
-OBS_VAR3_NAME = WIND -OBS_VAR3_LEVELS = Z10 -OBS_VAR3_THRESH = ge5, ge10, ge15 -OBS_VAR3_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR4_NAME = TCDC -FCST_VAR4_LEVELS = L0 -FCST_VAR4_THRESH = lt25, gt75 -FCST_VAR4_OPTIONS = GRIB_lvl_typ = 200; - GRIB2_ipdtmpl_index=[27]; - GRIB2_ipdtmpl_val=[255]; - interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR4_NAME = TCDC -OBS_VAR4_LEVELS = L0 -OBS_VAR4_THRESH = lt25, gt75 - -FCST_VAR5_NAME = VIS -FCST_VAR5_LEVELS = L0 -FCST_VAR5_THRESH = lt1609, lt8045, ge8045 -FCST_VAR5_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR5_NAME = VIS -OBS_VAR5_LEVELS = L0 -OBS_VAR5_THRESH = lt1609, lt8045, ge8045 - -FCST_VAR6_NAME = HGT -FCST_VAR6_LEVELS = L0 -FCST_VAR6_THRESH = lt152, lt1520, ge914 -FCST_VAR6_OPTIONS = GRIB_lvl_typ = 215; - desc = "CEILING"; -OBS_VAR6_NAME = CEILING -OBS_VAR6_LEVELS = L0 -OBS_VAR6_THRESH = lt152, lt305, ge914 -OBS_VAR6_OPTIONS = GRIB_lvl_typ = 215 - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = {{obs_input_dir}} -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. -# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. 
-# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/EnsembleStat_ADPUPA.conf b/parm/metplus/EnsembleStat_ADPUPA.conf deleted file mode 100644 index edfda41b89..0000000000 --- a/parm/metplus/EnsembleStat_ADPUPA.conf +++ /dev/null @@ -1,351 +0,0 @@ -# EnsembleStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = EnsembleStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_ENSEMBLE_STAT_WINDOW_END = {OBS_WINDOW_END} - -# number of expected members for ensemble. 
Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -ENSEMBLE_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#ENSEMBLE_STAT_OBS_QUALITY_EXC = - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. -ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = NONE -#ENSEMBLE_STAT_REGRID_METHOD = BILIN -#ENSEMBLE_STAT_REGRID_WIDTH = 2 -#ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -#ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -ENSEMBLE_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} -ENSEMBLE_STAT_DUPLICATE_FLAG = NONE -ENSEMBLE_STAT_SKIP_CONST = FALSE -ENSEMBLE_STAT_OBS_ERROR_FLAG = FALSE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -#ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -#ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. 
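Each verified field occupies one numbered FCST_VARn/OBS_VARn slot, and the numbering must stay in sync between the forecast and observation halves of each pair. A rough Python sketch of that bookkeeping (a hypothetical generator, using a few of the ADPUPA entries below as sample data; the Jinja-templated confs added by this PR do the same job with a namespace counter):

# Hypothetical generator: emit numbered FCST_VARn/OBS_VARn entries, keeping
# the forecast and observation halves of each pair on the same index n.
fields = [
    ("TMP", "P850", "ge288, ge293, ge298"),
    ("TMP", "P700", "ge273, ge278, ge283"),
    ("WIND", "P500", "ge15, ge21, ge26"),
]

lines = []
for n, (name, level, thresh) in enumerate(fields, start=1):
    lines += [
        f"FCST_VAR{n}_NAME = {name}",
        f"FCST_VAR{n}_LEVELS = {level}",
        f"FCST_VAR{n}_THRESH = {thresh}",
        f"OBS_VAR{n}_NAME = {name}",
        f"OBS_VAR{n}_LEVELS = {level}",
        f"OBS_VAR{n}_THRESH = {thresh}",
        "",
    ]
print("\n".join(lines))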
-# -FCST_VAR1_NAME = TMP -FCST_VAR1_LEVELS = P850 -FCST_VAR1_THRESH = ge288, ge293, ge298 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = P850 -OBS_VAR1_THRESH = ge288, ge293, ge298 -OBS_VAR1_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR2_NAME = TMP -FCST_VAR2_LEVELS = P700 -FCST_VAR2_THRESH = ge273, ge278, ge283 -OBS_VAR2_NAME = TMP -OBS_VAR2_LEVELS = P700 -OBS_VAR2_THRESH = ge273, ge278, ge283 -OBS_VAR2_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR3_NAME = TMP -FCST_VAR3_LEVELS = P500 -FCST_VAR3_THRESH = ge258, ge263, ge268 -OBS_VAR3_NAME = TMP -OBS_VAR3_LEVELS = P500 -OBS_VAR3_THRESH = ge258, ge263, ge268 -OBS_VAR3_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR4_NAME = DPT -FCST_VAR4_LEVELS = P850 -FCST_VAR4_THRESH = ge273, ge278, ge283 -OBS_VAR4_NAME = DPT -OBS_VAR4_LEVELS = P850 -OBS_VAR4_THRESH = ge273, ge278, ge283 -OBS_VAR4_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR5_NAME = DPT -FCST_VAR5_LEVELS = P700 -FCST_VAR5_THRESH = ge263, ge286, ge273 -OBS_VAR5_NAME = DPT -OBS_VAR5_LEVELS = P700 -OBS_VAR5_THRESH = ge263, ge286, ge273 -OBS_VAR5_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR6_NAME = WIND -FCST_VAR6_LEVELS = P850 -FCST_VAR6_THRESH = ge5, ge10, ge15 -OBS_VAR6_NAME = WIND -OBS_VAR6_LEVELS = P850 -OBS_VAR6_THRESH = ge5, ge10, ge15 -OBS_VAR6_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR7_NAME = WIND -FCST_VAR7_LEVELS = P700 -FCST_VAR7_THRESH = ge10, ge15, ge20 -OBS_VAR7_NAME = WIND -OBS_VAR7_LEVELS = P700 -OBS_VAR7_THRESH = ge10, ge15, ge20 -OBS_VAR7_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR8_NAME = WIND -FCST_VAR8_LEVELS = P500 -FCST_VAR8_THRESH = ge15, ge21, ge26 -OBS_VAR8_NAME = WIND -OBS_VAR8_LEVELS = P500 -OBS_VAR8_THRESH = ge15, ge21, ge26 -OBS_VAR8_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR9_NAME = WIND -FCST_VAR9_LEVELS = P250 -FCST_VAR9_THRESH = ge26, ge31, ge36, ge46, ge62 -OBS_VAR9_NAME = WIND -OBS_VAR9_LEVELS = P250 -OBS_VAR9_THRESH = ge26, ge31, ge36, ge46, ge62 -OBS_VAR9_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR10_NAME = HGT -FCST_VAR10_LEVELS = P500 -FCST_VAR10_THRESH = ge5400, ge5600, ge5880 -OBS_VAR10_NAME = HGT -OBS_VAR10_LEVELS = P500 -OBS_VAR10_THRESH = ge5400, ge5600, ge5880 -OBS_VAR10_OPTIONS = obs_error = { flag = TRUE; dist_type = NONE; dist_parm = []; inst_bias_scale = 1.0; inst_bias_offset = 0.0; min = NA; max = NA; } - -FCST_VAR11_NAME = CAPE -FCST_VAR11_LEVELS = L0 -FCST_VAR11_THRESH = le1000, gt1000&<2500, ge2500&<4000, ge2500 -FCST_VAR11_OPTIONS = cnt_thresh = [ >0 ]; -OBS_VAR11_NAME = CAPE -OBS_VAR11_LEVELS = L0-100000 -OBS_VAR11_THRESH = le1000, gt1000&<2500, ge2500&<4000, ge2500 -OBS_VAR11_OPTIONS = cnt_thresh 
= [ >0 ]; - cnt_logic = UNION; - -FCST_VAR12_NAME = HPBL -FCST_VAR12_LEVELS = Z0 -FCST_VAR12_THRESH = lt500, lt1500, gt1500 -OBS_VAR12_NAME = PBL -OBS_VAR12_LEVELS = L0 -OBS_VAR12_THRESH = lt500, lt1500, gt1500 -OBS_VAR12_OPTIONS = desc = "TKE"; - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = {{obs_input_dir}} -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. -# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. -# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/EnsembleStat_APCP.conf b/parm/metplus/EnsembleStat_APCP.conf deleted file mode 100644 index 7604a90bd7..0000000000 --- a/parm/metplus/EnsembleStat_APCP.conf +++ /dev/null @@ -1,258 +0,0 @@ -# EnsembleStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = EnsembleStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. 
-# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_FILE_WINDOW_BEGIN = 0 -OBS_FILE_WINDOW_END = 0 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = 0 -OBS_ENSEMBLE_STAT_WINDOW_END = 0 - -# number of expected members for ensemble. Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. 
-ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = FCST -ENSEMBLE_STAT_REGRID_METHOD = BUDGET -ENSEMBLE_STAT_REGRID_WIDTH = 2 -ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -ENSEMBLE_STAT_MESSAGE_TYPE = -ENSEMBLE_STAT_DUPLICATE_FLAG = UNIQUE -ENSEMBLE_STAT_SKIP_CONST = TRUE -ENSEMBLE_STAT_OBS_ERROR_FLAG = TRUE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_OPTIONS = ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; -OBS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_OPTIONS = {FCST_VAR1_OPTIONS} - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = {{obs_input_dir}} -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. 
Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. -# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. -# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/EnsembleStat_ASNOW.conf b/parm/metplus/EnsembleStat_ASNOW.conf deleted file mode 100644 index 8897b03295..0000000000 --- a/parm/metplus/EnsembleStat_ASNOW.conf +++ /dev/null @@ -1,259 +0,0 @@ -# EnsembleStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = EnsembleStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. 
-# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_FILE_WINDOW_BEGIN = 0 -OBS_FILE_WINDOW_END = 0 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = 0 -OBS_ENSEMBLE_STAT_WINDOW_END = 0 - -# number of expected members for ensemble. Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. 
-ENSEMBLE_STAT_MET_OBS_ERR_TABLE = {MET_BASE}/table_files/obs_error_table.txt - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = FCST -ENSEMBLE_STAT_REGRID_METHOD = BUDGET -ENSEMBLE_STAT_REGRID_WIDTH = 2 -ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -ENSEMBLE_STAT_MESSAGE_TYPE = -ENSEMBLE_STAT_DUPLICATE_FLAG = UNIQUE -ENSEMBLE_STAT_SKIP_CONST = TRUE -ENSEMBLE_STAT_OBS_ERROR_FLAG = TRUE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_OPTIONS = ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_OPTIONS = {FCST_VAR1_OPTIONS}; - convert(x) = 100.0*x; - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = {{obs_input_dir}} -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. 
Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. -# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. -# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/EnsembleStat_REFC.conf b/parm/metplus/EnsembleStat_REFC.conf deleted file mode 100644 index 6de6eddeb8..0000000000 --- a/parm/metplus/EnsembleStat_REFC.conf +++ /dev/null @@ -1,265 +0,0 @@ -# EnsembleStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = EnsembleStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. 
-# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_FILE_WINDOW_BEGIN = -300 -OBS_FILE_WINDOW_END = 300 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = 0 -OBS_ENSEMBLE_STAT_WINDOW_END = 0 - -# number of expected members for ensemble. Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. -ENSEMBLE_STAT_MET_OBS_ERR_TABLE = - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = FCST -ENSEMBLE_STAT_REGRID_METHOD = BUDGET -ENSEMBLE_STAT_REGRID_WIDTH = 2 -ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -# Should this parameter be set to something other than ADPSFC (maybe -# just leave empty) since we are not verifying surface fields? 
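The ens.ens_thresh comment earlier in this hunk describes a simple gate: the tool runs only if the ratio of valid member files to the expected member count meets the threshold, so ENSEMBLE_STAT_ENS_THRESH = 0.05 tolerates most members being missing (the separate vld_thresh of 1.0 then applies to the data within each file). A small Python sketch of that check as the comments describe it (assumed semantics, not MET source):

def enough_members(n_valid: int, n_expected: int, ens_thresh: float = 0.05) -> bool:
    """Run the tool only if the ratio of valid member files to expected
    member files meets the ens.ens_thresh threshold."""
    return n_expected > 0 and (n_valid / n_expected) >= ens_thresh

# With ENSEMBLE_STAT_ENS_THRESH = 0.05, even 1 of 10 members is enough:
print(enough_members(1, 10))   # True  (0.10 >= 0.05)
print(enough_members(0, 10))   # False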
-ENSEMBLE_STAT_MESSAGE_TYPE = ADPSFC -ENSEMBLE_STAT_DUPLICATE_FLAG = NONE -ENSEMBLE_STAT_SKIP_CONST = TRUE -ENSEMBLE_STAT_OBS_ERROR_FLAG = FALSE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = FULL - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. -# -FCST_VAR1_NAME = {{fieldname_in_fcst_input}} -FCST_VAR1_LEVELS = L0 -FCST_VAR1_OPTIONS = ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = {{obs_input_dir}} -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. -# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. 
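The OUTPUT_PREFIX values in these confs embed METplus time tokens such as {lead?fmt=%H%M%S}, which renders a forecast lead as hours, minutes, and seconds, and the [filename_templates] section that follows uses the same token syntax for input and output paths. A small Python sketch of how such a lead token expands (illustrative only; METplus implements this internally):

def fmt_lead_hms(lead_hours: int) -> str:
    """Expand a forecast lead the way {lead?fmt=%H%M%S} does, e.g. 6 -> '060000'."""
    h, rem = divmod(lead_hours * 3600, 3600)
    m, s = divmod(rem, 60)
    return f"{h:02d}{m:02d}{s:02d}"

# The OUTPUT_PREFIX entries end in '{lead?fmt=%H%M%S}L', so a 6-hour lead
# yields a '060000L' suffix in the output file names.
print(fmt_lead_hms(6) + "L")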
-[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. -# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/EnsembleStat_RETOP.conf b/parm/metplus/EnsembleStat_RETOP.conf deleted file mode 100644 index abd2dd2a45..0000000000 --- a/parm/metplus/EnsembleStat_RETOP.conf +++ /dev/null @@ -1,267 +0,0 @@ -# EnsembleStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = EnsembleStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. 
-# -METPLUS_CONF = {ENSEMBLE_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to EnsembleStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -ENSEMBLE_STAT_CONFIG_FILE = {PARM_BASE}/met_config/EnsembleStatConfig_wrapped -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -ENSEMBLE_STAT_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Observation data time window(s). -# -OBS_FILE_WINDOW_BEGIN = -300 -OBS_FILE_WINDOW_END = 300 -OBS_ENSEMBLE_STAT_WINDOW_BEGIN = 0 -OBS_ENSEMBLE_STAT_WINDOW_END = 0 - -# number of expected members for ensemble. Should correspond with the -# number of items in the list for FCST_ENSEMBLE_STAT_INPUT_TEMPLATE -ENSEMBLE_STAT_N_MEMBERS = {{num_ens_members}} - -# ens.ens_thresh value in the MET config file -# threshold for ratio of valid files to expected files to allow app to run -ENSEMBLE_STAT_ENS_THRESH = 0.05 - -# ens.vld_thresh value in the MET config file -ENSEMBLE_STAT_ENS_VLD_THRESH = 1.0 - -# ENSEMBLE_STAT_MET_OBS_ERR_TABLE is not required. -# If the variable is not defined, or the value is not set, then the MET -# default is used. -ENSEMBLE_STAT_MET_OBS_ERR_TABLE = - - -# Used in the MET config file for: regrid to_grid field -ENSEMBLE_STAT_REGRID_TO_GRID = FCST -ENSEMBLE_STAT_REGRID_METHOD = BUDGET -ENSEMBLE_STAT_REGRID_WIDTH = 2 -ENSEMBLE_STAT_REGRID_VLD_THRESH = 0.5 -ENSEMBLE_STAT_REGRID_SHAPE = SQUARE - -ENSEMBLE_STAT_CENSOR_THRESH = -ENSEMBLE_STAT_CENSOR_VAL = - -# Should this parameter be set to something other than ADPSFC (maybe -# just leave empty) since we are not verifying surface fields? 
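The gridded-obs confs above pair a nonzero file window (OBS_FILE_WINDOW_BEGIN/END = -300/300) with a zero matching window: an observation file timestamped within five minutes of the forecast valid time is accepted, even though the observations inside must then match the valid time exactly. A minimal Python sketch of the file-window test (assumed semantics based on those settings, not MET source):

from datetime import datetime, timedelta

def obs_file_in_window(valid_time: datetime, file_time: datetime,
                       begin_s: int = -300, end_s: int = 300) -> bool:
    """Accept an observation file whose timestamp falls within
    [begin_s, end_s] seconds of the forecast valid time."""
    offset = (file_time - valid_time).total_seconds()
    return begin_s <= offset <= end_s

valid = datetime(2019, 6, 15, 18, 0)
print(obs_file_in_window(valid, valid + timedelta(minutes=4)))    # True (240 s)
print(obs_file_in_window(valid, valid - timedelta(minutes=10)))   # False (-600 s)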
-ENSEMBLE_STAT_MESSAGE_TYPE = ADPSFC -ENSEMBLE_STAT_DUPLICATE_FLAG = NONE -ENSEMBLE_STAT_SKIP_CONST = TRUE -ENSEMBLE_STAT_OBS_ERROR_FLAG = FALSE - -ENSEMBLE_STAT_ENS_SSVAR_BIN_SIZE = 1.0 -ENSEMBLE_STAT_ENS_PHIST_BIN_SIZE = 0.05 - -#ENSEMBLE_STAT_CLIMO_MEAN_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_MEAN_FIELD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_MEAN_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_MEAN_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_MEAN_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_MEAN_HOUR_INTERVAL = 6 - -#ENSEMBLE_STAT_CLIMO_STDEV_FILE_NAME = -#ENSEMBLE_STAT_CLIMO_STDEV_FIELD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_WIDTH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#ENSEMBLE_STAT_CLIMO_STDEV_REGRID_SHAPE = -#ENSEMBLE_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#ENSEMBLE_STAT_CLIMO_STDEV_MATCH_MONTH = -#ENSEMBLE_STAT_CLIMO_STDEV_DAY_INTERVAL = 31 -#ENSEMBLE_STAT_CLIMO_STDEV_HOUR_INTERVAL = 6 - - -ENSEMBLE_STAT_CLIMO_CDF_BINS = 1 -ENSEMBLE_STAT_CLIMO_CDF_CENTER_BINS = False -ENSEMBLE_STAT_CLIMO_CDF_WRITE_BINS = False - -ENSEMBLE_STAT_MASK_GRID = FULL - -ENSEMBLE_STAT_CI_ALPHA = 0.05 - -ENSEMBLE_STAT_INTERP_FIELD = BOTH -ENSEMBLE_STAT_INTERP_VLD_THRESH = 1.0 -ENSEMBLE_STAT_INTERP_SHAPE = SQUARE -ENSEMBLE_STAT_INTERP_METHOD = NEAREST -ENSEMBLE_STAT_INTERP_WIDTH = 1 - -ENSEMBLE_STAT_OUTPUT_FLAG_ECNT = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RPS = NONE -ENSEMBLE_STAT_OUTPUT_FLAG_RHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_PHIST = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_ORANK = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_SSVAR = STAT -ENSEMBLE_STAT_OUTPUT_FLAG_RELP = STAT - -ENSEMBLE_STAT_ENSEMBLE_FLAG_RANK = FALSE -ENSEMBLE_STAT_ENSEMBLE_FLAG_WEIGHT = FALSE -# -# Forecast and observation variables and levels as specified in the fcst -# field dictionary of the MET configuration file. Specify as FCST_VARn_NAME, -# FCST_VARn_LEVELS, (optional) FCST_VARn_OPTION and OBS_VARn_NAME, -# OBS_VARn_LEVELS, (optional) OBS_VARn_OPTION. -# -FCST_VAR1_NAME = {{fieldname_in_fcst_input}} -FCST_VAR1_LEVELS = L0 -FCST_VAR1_OPTIONS = convert(x) = x * 3.28084 * 0.001; ;; Convert from meters to kilofeet. - ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; ;; Convert from kilometers to kilofeet. - ens_ssvar_bin_size = 50.0; - ens_phist_bin_size = 0.05; - -[dir] -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -# -# Point observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_DIR = -# -# Grid observation input directory for EnsembleStat. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_DIR = {{obs_input_dir}} -# -# Forecast model input directory for EnsembleStat. -# -FCST_ENSEMBLE_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to EnsembleStat. Not used -# in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Output directory for EnsembleStat. 
-# -ENSEMBLE_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for point observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_POINT_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_POINT_INPUT_TEMPLATE = -# -# Template for gridded observation input to EnsembleStat relative to -# OBS_ENSEMBLE_STAT_GRID_INPUT_DIR. -# -OBS_ENSEMBLE_STAT_GRID_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to EnsembleStat relative to -# FCST_ENSEMBLE_STAT_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -FCST_ENSEMBLE_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from EnsembleStat relative to ENSEMBLE_STAT_OUTPUT_DIR. -# -ENSEMBLE_STAT_OUTPUT_TEMPLATE = -ENSEMBLE_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to EnsembleStat relative to -# ENSEMBLE_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -ENSEMBLE_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# EnsembleStat. Not used for this example. -# -ENSEMBLE_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GenEnsProd.conf b/parm/metplus/GenEnsProd.conf new file mode 100644 index 0000000000..6c47cedb0d --- /dev/null +++ b/parm/metplus/GenEnsProd.conf @@ -0,0 +1,432 @@ +# {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{fhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Specify the name of the METplus log file. 
+#
+LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
+#
+# Specify the location and name of the final METplus conf file.
+#
+METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}}
+#
+# Name to identify model (forecast) data in output.
+#
+MODEL = {{vx_fcst_model_name}}
+
+{{METPLUS_TOOL_NAME}}_DESC = NA
+#
+# Name to identify observation data in output.
+#
+OBTYPE = {{obtype}}
+#
+# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
+#
+LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
+
+###
+# File I/O
+###
+
+#
+# Forecast model input directory for {{MetplusToolName}}.
+#
+{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+# Note that this can be a comma-separated list of ensemble members or a
+# single entry; the filename wildcard characters ? and * may be used.
+#
+{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+
+# {{METPLUS_TOOL_NAME}}_CTRL_INPUT_DIR = {INPUT_BASE}
+# {{METPLUS_TOOL_NAME}}_CTRL_INPUT_TEMPLATE =
+# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
+
+#
+# Output directory for {{MetplusToolName}}.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE = {{metplus_tool_name}}_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+#
+# There are n ensembles but 1 is used as control, so specify n-1 members.
+#
+{{METPLUS_TOOL_NAME}}_N_MEMBERS = {{num_ens_members}}
+
+###
+# Field Info
+###
+#
+# Ensemble variables and levels as specified in the ens field dictionary
+# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS,
+# (optional) ENS_VARn_OPTIONS.
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
+#}
+{%- set indx_input_thresh_fcst = '' %}
+{%- set error_msg = '' %}
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+
+{%- set field_fcst = '' %}
+{%- set level_fcst = '' %}
+{%- set thresh_fcst = '' %}
+
+{%- set threshes_fcst = [] %}
+
+{#-
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
+#}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
+
+{#-
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group.
Then set the delimiter string that separates forecast and +observation values in the various items (i.e. dictionary keys and values +representing field names, levels, and thresholds) in this dictionary. +#} +{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %} +{%- set delim_str = metplus_macros.set_delim_str() %} + +{#- +Loop over the fields and set field names, levels, thresholds, and/or +options for each forecast field in the METplus configuration file. Note +that GenEnsProd only deals with forecasts; it does not consider observations. +#} +{%- set ns = namespace(var_count = 0) %} +{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %} + + {%- if delim_str in field_cpld %} + {%- set field_fcst = field_cpld.split(delim_str)[0] %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- endif %} + +{#- +For convenience, create list of valid forecast levels for the current +field. +#} + {%- set valid_levels_fcst = [] %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + {%- if delim_str in level_cpld %} + {%- set level_fcst = level_cpld.split(delim_str)[0] %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- endif %} + {%- set tmp = valid_levels_fcst.append(level_fcst) %} + {%- endfor %} + +{#- +Make sure that the input forecast level (input_level_fcst) is set to a +valid value. +#} + {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %} + {%- set error_msg = '\n' ~ +'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~ +'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~ +'for the current forecast field (field_fcst). This is not the case:\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + +{#- +Loop over the (coupled) levels and corresponding lists of thresholds. +Extract from these the level values for forecasts and use them to set the +forecast field names, levels, thresholds, and/or options in the METplus +configuration file. +#} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + + {%- if delim_str in level_cpld %} + {%- set level_fcst = level_cpld.split(delim_str)[0] %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- endif %} + + {%- set valid_threshes_fcst = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst = thresh_cpld.split(delim_str)[0] %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- endfor %} + + {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} +{#- +Increment the METplus variable counter. +#} + {%- set ns.var_count = ns.var_count+1 %} + +{#- +Set forecast field name. Note that this has to exactly match the name +of the field in the input forecast file. + +For accumulated fields, the input forecast file is generated by MET's +PcpCombine tool. In that file, the field name consists of the forecast +field name here (field_fcst) with the accumulation period appended to +it (separated by an underscore), so we must do the same here to get an +exact match. 
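The coupled-key convention just described, a forecast value and an observation value joined by a delimiter, with a bare name shared by both sides when no delimiter is present, reduces to a few lines of Python. The '%%' delimiter and the coupled key below are assumptions for illustration; the real delimiter comes from metplus_macros.set_delim_str():

    # Sketch of splitting a coupled fcst/obs key (delimiter value assumed).
    DELIM = "%%"  # assumption; the actual string is set in metplus_macros.jinja

    def split_coupled(name_cpld):
        """Return (fcst_name, obs_name) for a possibly-coupled key."""
        if DELIM in name_cpld:
            fcst, obs = name_cpld.split(DELIM, 1)
            return fcst, obs
        return name_cpld, name_cpld

    assert split_coupled("APCP") == ("APCP", "APCP")
    assert split_coupled("RETOP%%EchoTop18") == ("RETOP", "EchoTop18")  # hypothetical key
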
+#} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +ENS_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}} + {%- else %} +ENS_VAR{{ns.var_count}}_NAME = {{field_fcst}} + {%- endif %} + +{#- +Set forecast field level. +#} +ENS_VAR{{ns.var_count}}_LEVELS = {{level_fcst}} + +{#- +Set forecast field threshold(s). Note that no forecast thresholds are +included in the METplus configuration file if input_thresh_fcst is set +to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_thresh_fcst is set to 'all', set the list of forecast thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_fcst = valid_threshes_fcst %} +{#- +If input_thresh_fcst is set to a specific value: +* If that value is valid, i.e. it exists in the list of valid forecast + thresholds, get its index in that list and use it to set the forecast + threshold to a one-element list containing that value. +* If the input forecast threshold is not valid, print out a warning message + and exit. +#} + {%- else %} + + {%- if input_thresh_fcst in valid_threshes_fcst %} + {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} + {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} + {%- else %} + {%- set error_msg = '\n' ~ +'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~ +'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~ +'of valid forecast thresholds (valid_threshes_fcst):\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' level_fcst = ' ~ level_fcst ~ '\n' ~ +' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~ +' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + + {%- endif %} +{#- +If threshes_fcst has been reset to something other than its default +value of an empty list, then set the forecast thresholds in the METplus +configuration file because that implies threshes_fcst was set above to +a non-empty value. Then reset threshes_fcst to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_fcst != []) %} +ENS_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}} + {%- endif %} + {%- set threshes_fcst = [] %} + + {%- endif %} + +{#- +Set forecast field options. +#} + {%- set opts_indent_len = 19 %} + {%- if (ns.var_count > 9) and (ns.var_count <= 99) %} + {%- set opts_indent_len = opts_indent_len + 1 %} + {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %} + {%- set opts_indent_len = opts_indent_len + 2 %} + {%- elif (ns.var_count > 999) %} + {%- set opts_indent_len = opts_indent_len + 3 %} + {%- endif %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'RETOP' %} + + {%- if field_fcst == 'RETOP' %} +ENS_VAR{{ns.var_count}}_OPTIONS = convert(x) = x * 3.28084 * 0.001; ;; Convert from meters to kilofeet. 
+ {%- endif %} + + {%- elif input_field_group == 'ADPSFC' %} + + {%- if field_fcst == 'HGT' %} +ENS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215; +{{opts_indent}}desc = "CEILING"; + {%- elif field_fcst == 'TCDC' %} +ENS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 200; +{{opts_indent}}GRIB2_ipdtmpl_index=[27]; +{{opts_indent}}GRIB2_ipdtmpl_val=[255]; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif field_fcst == 'VIS' %} +ENS_VAR{{ns.var_count}}_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif field_fcst == 'WIND' %} +ENS_VAR{{ns.var_count}}_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. + {%- endif %} + + {%- elif input_field_group == 'ADPUPA' %} + + {%- if field_fcst == 'CAPE' %} +ENS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; + {%- endif %} + + {%- endif %} + +{#- +Print out a newline to separate the settings for the current field from +those for the next field. +#} + {{- '\n' }} + + {%- endif %} + + {%- endfor %} +{%- endfor %} +### +# {{MetplusToolName}} +### + +# {{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE +# {{METPLUS_TOOL_NAME}}_REGRID_METHOD = NEAREST +# {{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 1 +# {{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5 +# {{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE + +# {{METPLUS_TOOL_NAME}}_CENSOR_THRESH = +# {{METPLUS_TOOL_NAME}}_CENSOR_VAL = +# {{METPLUS_TOOL_NAME}}_CAT_THRESH = +# {{METPLUS_TOOL_NAME}}_NC_VAR_STR = + +# Threshold for ratio of valid files to expected files to allow app to run +{{METPLUS_TOOL_NAME}}_ENS_THRESH = 0.05 + +{{METPLUS_TOOL_NAME}}_NBRHD_PROB_WIDTH = 27 +{{METPLUS_TOOL_NAME}}_NBRHD_PROB_SHAPE = CIRCLE +{{METPLUS_TOOL_NAME}}_NBRHD_PROB_VLD_THRESH = 0.0 + +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_VLD_THRESH = 0.0 +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_SHAPE = CIRCLE +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_METHOD = GAUSSIAN +# {{METPLUS_TOOL_NAME}}_NMEP_SMOOTH_WIDTH = 1 + +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FILE_NAME = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FIELD = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_METHOD = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_WIDTH = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_VLD_THRESH = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_SHAPE = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_MATCH_MONTH = +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_DAY_INTERVAL = 31 +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_HOUR_INTERVAL = 6 + +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FILE_NAME = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FIELD = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_METHOD = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_WIDTH = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_VLD_THRESH = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_SHAPE = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_MATCH_MONTH = +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_DAY_INTERVAL = 31 +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_HOUR_INTERVAL = 6 + +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_LATLON = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_MEAN = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_STDEV = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_MINUS = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_PLUS = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_MIN = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_MAX = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_RANGE = TRUE 
+{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_VLD_COUNT = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_FREQUENCY = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_NEP = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_NMEP = TRUE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_CLIMO = FALSE +{{METPLUS_TOOL_NAME}}_ENSEMBLE_FLAG_CLIMO_CDF = FALSE + +# {{METPLUS_TOOL_NAME}}_ENS_MEMBER_IDS = +# {{METPLUS_TOOL_NAME}}_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_ADPSFC.conf b/parm/metplus/GenEnsProd_ADPSFC.conf deleted file mode 100644 index cb253f575b..0000000000 --- a/parm/metplus/GenEnsProd_ADPSFC.conf +++ /dev/null @@ -1,219 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. 
But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. -# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = TMP -ENS_VAR1_LEVELS = Z02 -ENS_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303 - -ENS_VAR2_NAME = DPT -ENS_VAR2_LEVELS = Z2 -ENS_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298 - -ENS_VAR3_NAME = WIND -ENS_VAR3_LEVELS = Z10 -ENS_VAR3_THRESH = ge5, ge10, ge15 -ENS_VAR3_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. - -ENS_VAR4_NAME = TCDC -ENS_VAR4_LEVELS = L0 -ENS_VAR4_THRESH = lt25, gt75 -ENS_VAR4_OPTIONS = GRIB_lvl_typ = 200; - GRIB2_ipdtmpl_index=[27]; - GRIB2_ipdtmpl_val=[255]; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -ENS_VAR5_NAME = VIS -ENS_VAR5_LEVELS = L0 -ENS_VAR5_THRESH = lt1609, lt8045, ge8045 -ENS_VAR5_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } - -ENS_VAR6_NAME = HGT -ENS_VAR6_LEVELS = L0 -ENS_VAR6_THRESH = lt152, lt1520, ge914 -ENS_VAR6_OPTIONS = GRIB_lvl_typ = 215; - desc = "CEILING"; - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE 
-GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_ADPUPA.conf b/parm/metplus/GenEnsProd_ADPUPA.conf deleted file mode 100644 index 863427752f..0000000000 --- a/parm/metplus/GenEnsProd_ADPUPA.conf +++ /dev/null @@ -1,236 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. 
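As the comment above notes, the input template can name ensemble members explicitly or rely on the ? and * wildcards. Conceptually the wildcard form expands to one path per member, as in this Python sketch (the path is hypothetical, and METplus substitutes the time tokens before any matching takes place):

    # Conceptual expansion of a wildcarded member template (hypothetical path).
    from glob import glob

    resolved = "mem*/postprd/rrfs.t00z.bgdawpf006.tm00.grib2"  # after time substitution
    members = sorted(glob(resolved))  # one match per ensemble-member directory
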
-# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. -# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = TMP -ENS_VAR1_LEVELS = P850 -ENS_VAR1_THRESH = ge288, ge293, ge298 - -ENS_VAR2_NAME = TMP -ENS_VAR2_LEVELS = P700 -ENS_VAR2_THRESH = ge273, ge278, ge283 - -ENS_VAR3_NAME = TMP -ENS_VAR3_LEVELS = P500 -ENS_VAR3_THRESH = ge258, ge263, ge268 - -ENS_VAR4_NAME = DPT -ENS_VAR4_LEVELS = P850 -ENS_VAR4_THRESH = ge273, ge278, ge283 - -ENS_VAR5_NAME = DPT -ENS_VAR5_LEVELS = P700 -ENS_VAR5_THRESH = ge263, ge268, ge273 - -ENS_VAR6_NAME = WIND -ENS_VAR6_LEVELS = P850 -ENS_VAR6_THRESH = ge5, ge10, ge15 - -ENS_VAR7_NAME = WIND -ENS_VAR7_LEVELS = P700 -ENS_VAR7_THRESH = ge10, ge15, ge20 - -ENS_VAR8_NAME = WIND -ENS_VAR8_LEVELS = P500 -ENS_VAR8_THRESH = ge15, ge21, ge26 - -ENS_VAR9_NAME = WIND -ENS_VAR9_LEVELS = P250 -ENS_VAR9_THRESH = ge26, ge31, ge36, ge46, ge62 - -ENS_VAR10_NAME = HGT -ENS_VAR10_LEVELS = P500 -ENS_VAR10_THRESH = ge5400, ge5600, ge5880 - -ENS_VAR11_NAME = CAPE -ENS_VAR11_LEVELS = L0 -ENS_VAR11_THRESH = le1000, gt1000&<2500, gt2500&<4000, gt2500 -ENS_VAR11_OPTIONS = cnt_thresh = [ >0 ]; - -ENS_VAR12_NAME = HPBL -ENS_VAR12_LEVELS = Z0 -ENS_VAR12_THRESH = lt500, lt1500, gt1500 - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL 
= 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_APCP.conf b/parm/metplus/GenEnsProd_APCP.conf deleted file mode 100644 index 0d05843a87..0000000000 --- a/parm/metplus/GenEnsProd_APCP.conf +++ /dev/null @@ -1,191 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. 
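Each of these per-field files repeats the same time-looping preamble: LOOP_BY = INIT with INIT_TIME_FMT = %Y%m%d%H, so the {{cdate}} value filled into INIT_BEG and INIT_END is a plain YYYYMMDDHH string. How such a string parses (the cycle value is hypothetical):

    # Parsing a YYYYMMDDHH cycle string, as implied by INIT_TIME_FMT = %Y%m%d%H.
    from datetime import datetime

    cdate = "2019061500"  # hypothetical cycle; the real value comes from {{cdate}}
    t = datetime.strptime(cdate, "%Y%m%d%H")
    assert (t.year, t.month, t.day, t.hour) == (2019, 6, 15, 0)
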
-# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. -# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -ENS_VAR1_LEVELS = A{{accum_hh}} -ENS_VAR1_THRESH = {{field_thresholds}} - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE 
-GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_ASNOW.conf b/parm/metplus/GenEnsProd_ASNOW.conf deleted file mode 100644 index ea9dac02d9..0000000000 --- a/parm/metplus/GenEnsProd_ASNOW.conf +++ /dev/null @@ -1,192 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. 
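The ENS_VARn_THRESH strings used throughout these files (ge268, lt1609, and so on) pair a two-letter comparison operator with a numeric value. Below is a small illustrative parser for the simple alphabetic form only; MET also accepts symbolic (>=0.5) and compound (gt1000&<2500) thresholds, which this sketch does not handle:

    # Toy parser for simple MET threshold strings such as 'ge268' or 'lt1609'.
    import operator
    import re

    OPS = {"lt": operator.lt, "le": operator.le, "gt": operator.gt,
           "ge": operator.ge, "eq": operator.eq, "ne": operator.ne}

    def check(thresh, x):
        m = re.fullmatch(r"(lt|le|gt|ge|eq|ne)(-?\d+(?:\.\d+)?)", thresh)
        if not m:
            raise ValueError(f"unrecognized threshold: {thresh!r}")
        return OPS[m.group(1)](x, float(m.group(2)))

    assert check("ge268", 270.0)       # a 2-m temperature of 270 K meets ge268
    assert not check("lt1609", 2000)   # 2000 m visibility is not below 1609 m
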
-# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. -# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### - -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -ENS_VAR1_LEVELS = A{{accum_hh}} -ENS_VAR1_THRESH = {{field_thresholds}} - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = 
FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_REFC.conf b/parm/metplus/GenEnsProd_REFC.conf deleted file mode 100644 index 553c23f69e..0000000000 --- a/parm/metplus/GenEnsProd_REFC.conf +++ /dev/null @@ -1,191 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. 
-# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = {{fieldname_in_fcst_input}} -ENS_VAR1_LEVELS = L0 -ENS_VAR1_THRESH = {{field_thresholds}} - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GenEnsProd_RETOP.conf b/parm/metplus/GenEnsProd_RETOP.conf deleted file mode 100644 index 49e5e5c3b6..0000000000 --- a/parm/metplus/GenEnsProd_RETOP.conf +++ /dev/null @@ -1,192 +0,0 @@ -# GenEnsProd METplus Configuration - -[config] - -# List of applications (tools) to run. 
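All of these GenEnsProd files gate execution with GEN_ENS_PROD_ENS_THRESH = 0.05, the minimum acceptable ratio of valid member files to expected member files. The gate amounts to the following sketch (names are illustrative):

    # Sketch of the ENS_THRESH gate: run only if enough member files are valid.
    def enough_members(n_valid, n_expected, ens_thresh=0.05):
        return n_expected > 0 and n_valid / n_expected >= ens_thresh

    assert enough_members(1, 10)      # ratio 0.10 >= 0.05, so the tool runs
    assert not enough_members(0, 10)  # no valid member files, so it does not
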
-PROCESS_LIST = GenEnsProd - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GEN_ENS_PROD_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Name to identify model (forecast) data in output. -# -MODEL = {{vx_fcst_model_name}} - -GEN_ENS_PROD_DESC = NA -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} - -### -# File I/O -### - -# -# Forecast model input directory for GenEnsProd. -# -GEN_ENS_PROD_INPUT_DIR = {{fcst_input_dir}} -# -# Template for forecast input to GenEnsProd relative to -# GEN_ENS_PROD_INPUT_DIR. -# -# Note that this can be a comma separated list of ensemble members -# or a single line, - filename wildcard characters may be used, ? or *. -# -GEN_ENS_PROD_INPUT_TEMPLATE = {{fcst_input_fn_template}} - -# GEN_ENS_PROD_CTRL_INPUT_DIR = {INPUT_BASE} -# GEN_ENS_PROD_CTRL_INPUT_TEMPLATE = -# {init?fmt=%Y%m%d%H}/mem1/postprd/{ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2 - -# -# Output directory for GenEnsProd. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GEN_ENS_PROD_OUTPUT_DIR = {{output_dir}} -# -# Template for output from GenEnsProd relative to GEN_ENS_PROD_OUTPUT_DIR. -# -GEN_ENS_PROD_OUTPUT_TEMPLATE = gen_ens_prod_{MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}_{lead?fmt=%H%M%S}L_{valid?fmt=%Y%m%d_%H%M%S}V.nc -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} -# -# There are n ensembles but 1 is used as control, so specify n-1 members. -# -GEN_ENS_PROD_N_MEMBERS = {{num_ens_members}} - -### -# Field Info -### -# -# Ensemble variables and levels as specified in the ens field dictionary -# of the MET configuration file. 
Specify as ENS_VARn_NAME, ENS_VARn_LEVELS, -# (optional) ENS_VARn_OPTION -# -ENS_VAR1_NAME = {{fieldname_in_fcst_input}} -ENS_VAR1_LEVELS = L0 -ENS_VAR1_THRESH = {{field_thresholds}} -ENS_VAR1_OPTIONS = convert(x) = x * 3.28084 * 0.001; ;; Convert from meters to kilofeet. - -### -# GenEnsProd -### - -# GEN_ENS_PROD_REGRID_TO_GRID = NONE -# GEN_ENS_PROD_REGRID_METHOD = NEAREST -# GEN_ENS_PROD_REGRID_WIDTH = 1 -# GEN_ENS_PROD_REGRID_VLD_THRESH = 0.5 -# GEN_ENS_PROD_REGRID_SHAPE = SQUARE - -# GEN_ENS_PROD_CENSOR_THRESH = -# GEN_ENS_PROD_CENSOR_VAL = -# GEN_ENS_PROD_CAT_THRESH = -# GEN_ENS_PROD_NC_VAR_STR = - -# Threshold for ratio of valid files to expected files to allow app to run -GEN_ENS_PROD_ENS_THRESH = 0.05 - -GEN_ENS_PROD_NBRHD_PROB_WIDTH = 27 -GEN_ENS_PROD_NBRHD_PROB_SHAPE = CIRCLE -GEN_ENS_PROD_NBRHD_PROB_VLD_THRESH = 0.0 - -# GEN_ENS_PROD_NMEP_SMOOTH_VLD_THRESH = 0.0 -# GEN_ENS_PROD_NMEP_SMOOTH_SHAPE = CIRCLE -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_DX = 81.27 -# GEN_ENS_PROD_NMEP_SMOOTH_GAUSSIAN_RADIUS = 120 -# GEN_ENS_PROD_NMEP_SMOOTH_METHOD = GAUSSIAN -# GEN_ENS_PROD_NMEP_SMOOTH_WIDTH = 1 - -# GEN_ENS_PROD_CLIMO_MEAN_FILE_NAME = -# GEN_ENS_PROD_CLIMO_MEAN_FIELD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_MEAN_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_MEAN_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_MEAN_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_MEAN_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_MEAN_HOUR_INTERVAL = 6 - -# GEN_ENS_PROD_CLIMO_STDEV_FILE_NAME = -# GEN_ENS_PROD_CLIMO_STDEV_FIELD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_WIDTH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_VLD_THRESH = -# GEN_ENS_PROD_CLIMO_STDEV_REGRID_SHAPE = -# GEN_ENS_PROD_CLIMO_STDEV_TIME_INTERP_METHOD = -# GEN_ENS_PROD_CLIMO_STDEV_MATCH_MONTH = -# GEN_ENS_PROD_CLIMO_STDEV_DAY_INTERVAL = 31 -# GEN_ENS_PROD_CLIMO_STDEV_HOUR_INTERVAL = 6 - -GEN_ENS_PROD_ENSEMBLE_FLAG_LATLON = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MEAN = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_STDEV = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_MINUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_PLUS = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MIN = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_MAX = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_RANGE = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_VLD_COUNT = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_FREQUENCY = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_NMEP = TRUE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO = FALSE -GEN_ENS_PROD_ENSEMBLE_FLAG_CLIMO_CDF = FALSE - -# GEN_ENS_PROD_ENS_MEMBER_IDS = -# GEN_ENS_PROD_CONTROL_ID = diff --git a/parm/metplus/GridStat_APCP.conf b/parm/metplus/GridStat_APCP.conf deleted file mode 100644 index 51e5125951..0000000000 --- a/parm/metplus/GridStat_APCP.conf +++ /dev/null @@ -1,309 +0,0 @@ -# GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. 
-# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. Here, -# we store the value of the original lead in this column, i.e. the lead -# with zero corresponding to the actual start time of the forecast (which -# is (cdate - time_lag)), not to cdate. This is just the lead in -# LEAD_SEQ with the time lag (time_lag) of the current forecast member -# added on. -# -# Uncomment this line only after upgrading to METplus 5.x. 
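The "original lead" that the DESC comment above describes is simply the nominal lead from LEAD_SEQ plus the member's time lag, formatted %H%M%S-style. A sketch of that arithmetic (function and variable names are illustrative, not from the workflow):

    # The DESC value described above: nominal lead plus the member's time lag.
    def original_lead(nominal_lead_hrs, time_lag_hrs):
        total_hrs = nominal_lead_hrs + time_lag_hrs
        return f"{total_hrs:02d}0000"  # whole hours, %H%M%S-style

    assert original_lead(6, 0) == "060000"
    assert original_lead(6, 3) == "090000"  # member lagged 3 h behind nominal cdate
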
-#GRID_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}} -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -# Note on use of set_attr_lead and ensemble member time-lagging: -# ------------------------------------------------------------- -# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS -# specifies the lead to use both in naming of the output .stat and .nc -# files and for setting the lead values contained in those files. This -# option causes MET/METplus to use the lead values in the variable LEAD_SEQ -# set above, which are the same for all ensemble forecast members (i.e. -# regardless of whether members are time lagged with respect to the -# nominal cycle date specified by cdate). If set_attr_lead were not -# specified as below, then MET/METplus would get the lead from the input -# forecast file, and that would in general differ from one ensemble member -# to the next depending on whether the member is time-lagged. That would -# cause confusion, so here, we always use lead values with zero lead -# corresponding to the nominal cdate. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = {{field_thresholds}} -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds}} - -# -# Forecast data time window(s). -# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. 
-# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. 
-# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ASNOW.conf b/parm/metplus/GridStat_ASNOW.conf deleted file mode 100644 index 3960a10c30..0000000000 --- a/parm/metplus/GridStat_ASNOW.conf +++ /dev/null @@ -1,283 +0,0 @@ -# GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET config file to pass to GridStat. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. 
Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = - -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = {{field_thresholds}} -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = convert(x) = 100.0*x; -# -# Forecast data time window(s). -# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 5 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. 
-# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. 
-# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_REFC.conf b/parm/metplus/GridStat_REFC.conf deleted file mode 100644 index c7f34d27f9..0000000000 --- a/parm/metplus/GridStat_REFC.conf +++ /dev/null @@ -1,315 +0,0 @@ -# GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. 
Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. Here, -# we store the value of the original lead in this column, i.e. the lead -# with zero corresponding to the actual start time of the forecast (which -# is (cdate - time_lag)), not to cdate. This is just the lead in -# LEAD_SEQ with the time lag (time_lag) of the current forecast member -# added on. -# -# Uncomment this line only after upgrading to METplus 5.x. -#GRID_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}} -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -# Note on use of set_attr_lead and ensemble member time-lagging: -# ------------------------------------------------------------- -# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS -# specifies the lead to use both in naming of the output .stat and .nc -# files and for setting the lead values contained in those files. This -# option causes MET/METplus to use the lead values in the variable LEAD_SEQ -# set above, which are the same for all ensemble forecast members (i.e. -# regardless of whether members are time lagged with respect to the -# nominal cycle date specified by cdate). If set_attr_lead were not -# specified as below, then MET/METplus would get the lead from the input -# forecast file, and that would in general differ from one ensemble member -# to the next depending on whether the member is time-lagged. That would -# cause confusion, so here, we always use lead values with zero lead -# corresponding to the nominal cdate. -# -FCST_VAR1_NAME = {{fieldname_in_fcst_input}} -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = {{field_thresholds}} -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = censor_thresh = [eq-999, <-20]; - censor_val = [-9999, -20]; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). 
-# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 1,3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. 
-# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_RETOP.conf b/parm/metplus/GridStat_RETOP.conf deleted file mode 100644 index be91a0ba03..0000000000 --- a/parm/metplus/GridStat_RETOP.conf +++ /dev/null @@ -1,317 +0,0 @@ -# GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 
0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. Here, -# we store the value of the original lead in this column, i.e. the lead -# with zero corresponding to the actual start time of the forecast (which -# is (cdate - time_lag)), not to cdate. This is just the lead in -# LEAD_SEQ with the time lag (time_lag) of the current forecast member -# added on. -# -# Uncomment this line only after upgrading to METplus 5.x. -#GRID_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}} -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -# Note on use of set_attr_lead and ensemble member time-lagging: -# ------------------------------------------------------------- -# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS -# specifies the lead to use both in naming of the output .stat and .nc -# files and for setting the lead values contained in those files. This -# option causes MET/METplus to use the lead values in the variable LEAD_SEQ -# set above, which are the same for all ensemble forecast members (i.e. -# regardless of whether members are time lagged with respect to the -# nominal cycle date specified by cdate). If set_attr_lead were not -# specified as below, then MET/METplus would get the lead from the input -# forecast file, and that would in general differ from one ensemble member -# to the next depending on whether the member is time-lagged. 
That would -# cause confusion, so here, we always use lead values with zero lead -# corresponding to the nominal cdate. -# -FCST_VAR1_NAME = {{fieldname_in_fcst_input}} -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = {{field_thresholds}} -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - convert(x) = x * 3.28084 * 0.001; - cnt_thresh = [ >0 ]; - cnt_logic = UNION; -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = convert(x) = x * 3280.84 * 0.001; - censor_thresh = [<=-9.84252,eq-3.28084]; - censor_val = [-9999,-16.4042]; - cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 1,3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# 
NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensmean.conf b/parm/metplus/GridStat_ensmean.conf new file mode 100644 index 0000000000..6bbc20e3f8 --- /dev/null +++ b/parm/metplus/GridStat_ensmean.conf @@ -0,0 +1,662 @@ +# Ensemble mean {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). 
+# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{fhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. +# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. +# +{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped + +# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary +# See MET User's Guide for more information +{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = FCST +{{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5 +{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BUDGET +{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2 +{{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE + +#{{METPLUS_TOOL_NAME}}_INTERP_FIELD = BOTH +#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0 +#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE +#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST +#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1 + +#{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG = + +# +# Name to identify model (forecast) data in output. +# +# The variable MODEL is recorded in the stat files, and the data in +# these files is then plotted (e.g. using METViewer). Here, we add a +# suffix to MODEL that identifies the data as that for the ensemble +# mean. This makes it easier to identify each curve. +# +MODEL = {{vx_fcst_model_name}}_ensmean +# +# Name to identify observation data in output. +# +OBTYPE = {{obtype}} +# +# Value to enter under the DESC column in the output stat file. +# +{{METPLUS_TOOL_NAME}}_DESC = NA +# +# Overrides of MET configuration defaults. +# +{{METPLUS_TOOL_NAME}}_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; +# +# List of forecast and corresponding observation fields to process. +# +{#- +Import the file containing jinja macros. +#} +{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %} + +{#- +Set the probabilistic threshold to be used for the forecast field. If +necessary, this can be changed to be an input parameter in the calling +script instead of a hard-coded value as below. 
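+(For reference: '==0.1' is MET threshold syntax meaning "equal to 0.1".
+The same value appears hard-coded in FCST_*_PROB_THRESH further below;
+presumably the two are meant to stay in sync if this is ever changed.)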
+#}
+{%- set thresh_fcst_prob = '==0.1' %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
+#}
+{%- set indx_level_fcst = '' %}
+{%- set indx_input_thresh_fcst = '' %}
+{%- set error_msg = '' %}
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+{%- set tmp = '' %}
+
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+
+{%- set threshes_fcst = [] %}
+{%- set threshes_obs = [] %}
+
+{#-
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
+#}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
+
+{#-
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group. Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
+#}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}
+
+{#-
+Some fields in the specified field group (input_field_group) may need to
+be excluded from the METplus config file because calculating means for
+them doesn't make sense. List these (for each input_field_group) in the
+following dictionary.
+#}
+{%- set fields_fcst_to_exclude_by_field_group =
+  {'APCP': [],
+   'ASNOW': [],
+   'REFC': [],
+   'RETOP': [],
+   'ADPSFC': ['TCDC', 'VIS', 'HGT'],
+   'ADPUPA': []} %}
+{%- set fields_fcst_to_exclude = fields_fcst_to_exclude_by_field_group[input_field_group] %}
+
+{#-
+Remove from the dictionary fields_levels_threshes_cpld any fields that
+are in the list to be excluded.
+#}
+{%- for field_cpld in fields_levels_threshes_cpld.copy() %}
+
+  {%- if delim_str in field_cpld %}
+    {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
+  {%- else %}
+    {%- set field_fcst = field_cpld %}
+    {%- set field_obs = field_cpld %}
+  {%- endif %}
+
+  {%- if field_fcst in fields_fcst_to_exclude %}
+    {%- set tmp = fields_levels_threshes_cpld.pop(field_cpld) %}
+  {%- endif %}
+
+{%- endfor %}
+
+{#-
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in the
+METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %}
+
+  {%- if delim_str in field_cpld %}
+    {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
+  {%- else %}
+    {%- set field_fcst = field_cpld %}
+    {%- set field_obs = field_cpld %}
+  {%- endif %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field.
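+(As an illustration only: for an accumulated field such as APCP these
+lists might come out as ['A1', 'A3', 'A6'], while for a single-level
+field they might be ['L0']; the actual contents come from the levels in
+vx_config_dict.)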
+#} + {%- set valid_levels_fcst = [] %} + {%- set valid_levels_obs = [] %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + {%- set tmp = valid_levels_fcst.append(level_fcst) %} + {%- set tmp = valid_levels_obs.append(level_obs) %} + {%- endfor %} + +{#- +Make sure that the input forecast level (input_level_fcst) is set to a +valid value. +#} + {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %} + {%- set error_msg = '\n' ~ +'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~ +'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~ +'for the current forecast field (field_fcst). This is not the case:\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + +{#- +Loop over the (coupled) levels and corresponding lists of thresholds. +Extract from these the level values for forecasts and observations and +use them to set the forecast and observation field names, levels, +thresholds, and/or options in the METplus configuration file. +#} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + + {%- set valid_threshes_fcst = [] %} + {%- set valid_threshes_obs = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- set thresh_obs = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- set tmp = valid_threshes_obs.append(thresh_obs) %} + {%- endfor %} + + {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} +{#- +Increment the METplus variable counter. +#} + {%- set ns.var_count = ns.var_count+1 %} + +{#- +Set forecast field name. Note that this has to exactly match the name +of the field in the input forecast file(s). + +The input forecast files are generated by the MET/METplus GenEnsProd +tool. That tool adds the field's level to the variable names in its +output file to ensure that all variables in the file have distinct names. +For example, if the same field, say APCP, is output at two different +levels, say at A3 and A6 (for APCP, "levels" are really accumulation +periods), there need to be two variables in the output file, and they +obviously can't both be named "APCP", so GenEnsProd names one "APCP_A3" +and the other "APCP_A6". Here, the level is stored in the variable +level_fcst and, below, is included in the name of the forecast field. + +For accumulated fields, the field name in the input forecast file contains +TWO references to the accumulation period. The first is the level of the +forecast field added by GenEnsProd as described above. The second is +another reference to this same level (accumulation period) but added by +the MET/METplus's PcpCombine tool (whose output file is the input into +GenEnsProd). 
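+(Worked example, for illustration: with field_fcst = 'APCP', accum_hh =
+'03', and level_fcst = 'A3', the FCST_VAR<n>_NAME setting below renders
+to APCP_03_A3_ENS_MEAN.)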
PcpCombine adds this reference to the level (really the +accumulation period) to the field's name for the same reason that +GenEnsProd does, i.e. to ensure that the names of variables in the output +file are distinct. Here, this accumulation period is stored in the +variable accum_hh. Thus, for accumulated fields, below we add both +accum_hh and level_fcst to the field name to get an exact field name +match. +#} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}}_{{level_fcst}}_ENS_MEAN + {%- else %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{level_fcst}}_ENS_MEAN + {%- endif %} + +{#- +Set forecast field level. +#} +FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}} + +{#- +Set forecast field threshold(s). Note that no forecast thresholds are +included in the METplus configuration file if input_thresh_fcst is set +to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_thresh_fcst is set to 'all', set the list of forecast thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_fcst = valid_threshes_fcst %} +{#- +If input_thresh_fcst is set to a specific value: +* If that value is valid, i.e. it exists in the list of valid forecast + thresholds, get its index in that list and use it to set the forecast + threshold to a one-element list containing that value. Note that the + index will be needed later below when setting the observation threshold(s). +* If the input forecast threshold is not valid, print out a warning message + and exit. +#} + {%- else %} + + {%- if input_thresh_fcst in valid_threshes_fcst %} + {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %} + {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %} + {%- else %} + {%- set error_msg = '\n' ~ +'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~ +'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~ +'of valid forecast thresholds (valid_threshes_fcst):\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' level_fcst = ' ~ level_fcst ~ '\n' ~ +' valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~ +' input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + + {%- endif %} +{#- +If threshes_fcst has been reset to something other than its default +value of an empty list, then set the forecast thresholds in the METplus +configuration file because that implies threshes_fcst was set above to +a non-empty value. Then reset threshes_fcst to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_fcst != []) %} +FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}} + {%- endif %} + {%- set threshes_fcst = [] %} + + {%- endif %} + +{#- +Set forecast field options. +#} + {%- set opts_indent_len = 20 %} + {%- if (ns.var_count > 9) and (ns.var_count <= 99) %} + {%- set opts_indent_len = opts_indent_len + 1 %} + {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %} + {%- set opts_indent_len = opts_indent_len + 2 %} + {%- elif (ns.var_count > 999) %} + {%- set opts_indent_len = opts_indent_len + 3 %} + {%- endif %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'ADPUPA' %} + + {%- if field_fcst == 'CAPE' %} +FCST_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; + {%- endif %} + + {%- endif %} + +{#- +Set observation field name. 
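+(Illustrative rendered results: OBS_VAR1_NAME = APCP_03 for 3-hourly
+accumulated precipitation, or simply OBS_VAR1_NAME = REFC for composite
+reflectivity.)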
Note that this has to exactly match the name +of the field in the input observation file. + +For accumulated fields, the input observation file is generated by MET's +PcpCombine tool. In that file, the field name consists of the observation +field name here (field_obs) with the accumulation period appended to it +(separated by an underscore), so we must do the same here to get an exact +match. + +Note: +Turns out for ASNOW, PcpCombine is not run for obs, so we exclude that +from the "if" clause here (so it goes into the "else"). For workflow +behavior uniformity between APCP and ASNOW, consider running PcpCombine +for ASNOW observations as well (just as it's run for APCP observations). + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +#} + {%- if (input_field_group in ['APCP']) %} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}_{{accum_hh}} + {%- else %} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}} + {%- endif %} + +{#- +Set observation field level. +#} + {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %} + {%- set level_obs = valid_levels_obs[indx_level_fcst] %} +OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}} + +{#- +Set observation field threshold(s). Note that no observation thresholds +are included in the METplus configuration file if input_thresh_fcst is +set to 'none'. +#} + {%- if (input_thresh_fcst != 'none') %} +{#- +If input_thresh_fcst is set to 'all', set the list of observation thresholds +to the full set of valid values. +#} + {%- if (input_thresh_fcst == 'all') %} + + {%- set threshes_obs = valid_threshes_obs %} +{#- +If input_thresh_fcst is set to a specific forecast threshold, then the +observation threshold is given by the element in the list of valid +observation thresholds that has the same index as that of input_thresh_fcst +in the list of valid forecast thresholds. +#} + {%- else %} + {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %} + {%- endif %} +{#- +If threshes_obs has been reset to something other than its default value +of an empty list, then set the observation thresholds in the METplus +configuration file because that implies threshes_obs was set above to +a non-empty value. Then reset threshes_obs to its default value for +proper processing of thresholds for the next field. +#} + {%- if (threshes_obs != []) %} +OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}} + {%- endif %} + {%- set threshes_obs = [] %} + + {%- endif %} + +{#- +Set observation field options. +#} + {%- set opts_indent_len = opts_indent_len - 1 %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'ASNOW' %} + + {%- if field_obs == 'ASNOW' %} +OBS_VAR{{ns.var_count}}_OPTIONS = convert(x) = 100.0*x; + {%- endif %} + + {%- elif input_field_group == 'ADPUPA' %} + + {%- if field_obs == 'CAPE' %} +OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; +{{opts_indent}}cnt_logic = UNION; + {%- elif field_obs == 'PBL' %} +OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE"; + {%- endif %} + + {%- endif %} + +{#- +Print out a newline to separate the settings for the current field (both +forecast and observation settings) from those for the next field. +#} + {{- '\n' }} + + {%- endif %} + + {%- endfor %} +{%- endfor %} +# +# Forecast data time window(s). +# +FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = 0 +FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = 0 +# +# Observation data time window(s). 
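+#
+# A window of 0/0 requires the observation valid time to match the
+# forecast valid time exactly; widening it admits slightly time-offset
+# observations (e.g., -300/300 seconds for a five-minute tolerance, the
+# values the per-field REFC and RETOP configurations used).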
+# +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = 0 +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = 0 + +# MET {{MetplusToolName}} neighborhood values +# See the MET User's Guide {{MetplusToolName}} section for more information +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_FIELD = BOTH + +# width value passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = 3,5,7 + +# shape value passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_SHAPE = SQUARE + +# cov thresh list passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_COV_THRESH = >=0.5 + +# Set to true to run {{MetplusToolName}} separately for each field specified +# Set to false to create one run of {{MetplusToolName}} per run time that +# includes all fields specified. +{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False +# +# Set to true if forecast data is probabilistic. +# +FCST_IS_PROB = False +# +# Only used if FCST_IS_PROB is true - sets probabilistic threshold +# +FCST_{{METPLUS_TOOL_NAME}}_PROB_THRESH = ==0.1 + +{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} + +# Climatology data +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_DAY_INTERVAL = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_HOUR_INTERVAL = + +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_DAY_INTERVAL = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_HOUR_INTERVAL = + +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True + +{{METPLUS_TOOL_NAME}}_MASK_GRID = + +# Statistical output types +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = BOTH +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTS = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_GRAD = BOTH +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_DMAP = NONE + +# NetCDF matched pairs output file +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_VAR_NAME = +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_LATLON = 
FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_RAW = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DIFF = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO = FALSE
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO_CDP = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_WEIGHT = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_NBRHD = FALSE
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_FOURIER = FALSE
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_GRADIENT = FALSE
+#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_APPLY_MASK = FALSE
+
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used in
+# this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Directory in which to write output from {{MetplusToolName}}.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
+#
+# Variable used to specify one or more verification mask files for
+# {{MetplusToolName}}. Not used for this example.
+#
+{{METPLUS_TOOL_NAME}}_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_ensmean_APCP.conf b/parm/metplus/GridStat_ensmean_APCP.conf
deleted file mode 100644
index 6d3956c8e6..0000000000
--- a/parm/metplus/GridStat_ensmean_APCP.conf
+++ /dev/null
@@ -1,282 +0,0 @@
-# Ensemble mean GridStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GridStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = - -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_MEAN -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds}} - -# -# Forecast data time window(s). 
-# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. 
-# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensmean_ASNOW.conf b/parm/metplus/GridStat_ensmean_ASNOW.conf deleted file mode 100644 index 6fb8951a3f..0000000000 --- a/parm/metplus/GridStat_ensmean_ASNOW.conf +++ /dev/null @@ -1,287 +0,0 @@ -# Ensemble mean GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. 
-# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = - -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_MEAN -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = convert(x) = 100.0*x; -# -# Forecast data time window(s). -# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). 
-# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. 
-# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensmean_REFC.conf b/parm/metplus/GridStat_ensmean_REFC.conf deleted file mode 100644 index 451c82dfd5..0000000000 --- a/parm/metplus/GridStat_ensmean_REFC.conf +++ /dev/null @@ -1,313 +0,0 @@ -# Ensemble mean GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. 
-# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# String to search for in the foreast input files for forecast variable -# 1. -# -# Note: -# This is the name of the field in the NetCDF file(s) created by MET's -# gen_ens_prod tool. This tool reads in the grib2 file(s) (in this case -# of forecasts) and outputs NetCDF file(s) in which the array names -# consist of the value of fieldname_in_met_output plus a suffix that -# specifies additional properties of the data in the array such as the -# level, the type of statistic, etc. In this case, this suffix is -# "_L0_ENS_MEAN". Thus, below, FCST_VAR1_NAME must be set to the value -# of fieldname_in_met_output with "_L0_ENS_MEAN" appended to it. -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_L0_ENS_MEAN -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = {{field_thresholds}} -# -# String to search for in the observation input files for observation -# variable 1. -# -# Note: -# This is the name of the field in the grib2 observation file. 
Thus, -# it should not be set to {{fieldname_in_met_output}} because the -# value of fieldname_in_met_output is in general not the same as the -# name of the field in the grib2 observation file (although it can be -# for certain fields). If you do and it doesn't match, you may get an -# error like this from METplus: -# ERROR : VarInfoGrib2::set_dict() -> unrecognized GRIB2 field abbreviation ... -# -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file 
-#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensmean_RETOP.conf b/parm/metplus/GridStat_ensmean_RETOP.conf deleted file mode 100644 index a881ed3ab5..0000000000 --- a/parm/metplus/GridStat_ensmean_RETOP.conf +++ /dev/null @@ -1,315 +0,0 @@ -# Ensemble mean GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. 
-INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# String to search for in the foreast input files for forecast variable -# 1. -# -# Note: -# This is the name of the field in the NetCDF file(s) created by MET's -# gen_ens_prod tool. This tool reads in the grib2 file(s) (in this case -# of forecasts) and outputs NetCDF file(s) in which the array names -# consist of the value of fieldname_in_met_output plus a suffix that -# specifies additional properties of the data in the array such as the -# level, the type of statistic, etc. In this case, this suffix is -# "_L0_ENS_MEAN". Thus, below, FCST_VAR1_NAME must be set to the value -# of fieldname_in_met_output with "_L0_ENS_MEAN" appended to it. 
-# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_L0_ENS_MEAN -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = {{field_thresholds}} -FCST_VAR1_OPTIONS = convert(x) = x * 3.28084 * 0.001; -# -# String to search for in the observation input files for observation -# variable 1. -# -# Note: -# This is the name of the field in the grib2 observation file. Thus, -# it should not be set to {{fieldname_in_met_output}} because the -# value of fieldname_in_met_output is in general not the same as the -# name of the field in the grib2 observation file (although it can be -# for certain fields). If you do and it doesn't match, you may get an -# error like this from METplus: -# ERROR : VarInfoGrib2::set_dict() -> unrecognized GRIB2 field abbreviation ... -# -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = {{field_thresholds}} -OBS_VAR1_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = BOTH - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7 - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. 
-# -FCST_IS_PROB = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = STAT -GRID_STAT_OUTPUT_FLAG_CTC = STAT -GRID_STAT_OUTPUT_FLAG_CTS = STAT -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = STAT -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -#GRID_STAT_OUTPUT_FLAG_PCT = NONE -#GRID_STAT_OUTPUT_FLAG_PSTD = NONE -#GRID_STAT_OUTPUT_FLAG_PJC = NONE -#GRID_STAT_OUTPUT_FLAG_PRC = NONE -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = STAT -GRID_STAT_OUTPUT_FLAG_NBRCTS = STAT -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -#GRID_STAT_NC_PAIRS_FLAG_CLIMO_CDP = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. 
-# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensprob.conf b/parm/metplus/GridStat_ensprob.conf new file mode 100644 index 0000000000..a43b8ed340 --- /dev/null +++ b/parm/metplus/GridStat_ensprob.conf @@ -0,0 +1,653 @@ +# Ensemble probabilistic {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{fhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. +# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. +# +{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped + +# grid to remap data. 
Value is set as the 'to_grid' variable in the 'regrid' dictionary
+# See MET User's Guide for more information
+{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = FCST
+{{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5
+{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BUDGET
+{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2
+{{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE
+
+{%- if input_field_group in ['APCP', 'ASNOW'] %}
+
+#{{METPLUS_TOOL_NAME}}_INTERP_FIELD = BOTH
+#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0
+#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE
+#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST
+#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1
+
+#{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG =
+{%- elif input_field_group in ['REFC', 'RETOP'] %}
+
+{{METPLUS_TOOL_NAME}}_INTERP_FIELD = NONE
+{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0
+{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1
+
+{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG = NONE
+{%- endif %}
+#
+# Name to identify model (forecast) data in output.
+#
+# The variable MODEL is recorded in the stat files, and the data in
+# these files is then plotted (e.g. using METViewer). Here, we add a
+# suffix to MODEL that identifies the data as ensemble-probabilistic.
+# This makes it easier to identify each curve.
+#
+MODEL = {{vx_fcst_model_name}}_ensprob
+#
+# Name to identify observation data in output.
+#
+OBTYPE = {{obtype}}
+#
+# Value to enter under the DESC column in the output stat file.
+#
+{{METPLUS_TOOL_NAME}}_DESC = NA
+#
+# Overrides of MET configuration defaults.
+#
+{{METPLUS_TOOL_NAME}}_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE;
+#
+# List of forecast and corresponding observation fields to process.
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Set the probabilistic threshold to be used for the forecast field. If
+necessary, this can be changed to be an input parameter in the calling
+script instead of a hard-coded value as below.
+#}
+{%- set thresh_fcst_prob = '==0.1' %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja
+scoping rules). Define such variables.
+#}
+{%- set indx_level_fcst = '' %}
+{%- set indx_thresh_fcst = '' %}
+{%- set error_msg = '' %}
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+{%- set thresh_fcst_and_or = '' %}
+
+{#-
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
+#}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
+
+{#-
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group.
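+As a purely hypothetical illustration (with '<delim>' standing in for
+the delimiter string set below), this dictionary could look like
+  {'APCP': {'A1': ['gt0.0', 'ge2.54'], 'A3': ['gt0.0<delim>gt0.0']}}
+i.e. its keys are (possibly coupled) field names, and each value maps
+(possibly coupled) levels to lists of (possibly coupled) thresholds;
+the real contents come from the verification configuration.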
Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
+#}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}
+
+{#-
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in the
+METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+{#-
+Loop over each field twice, the first time treating the forecast field
+as probabilistic and the second time as a scalar.
+#}
+{%- for treat_fcst_as_prob in [True, False] %}
+
+ {%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %}
+
+ {%- if delim_str in field_cpld %}
+ {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set field_fcst = field_cpld %}
+ {%- set field_obs = field_cpld %}
+ {%- endif %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field.
+#}
+ {%- set valid_levels_fcst = [] %}
+ {%- set valid_levels_obs = [] %}
+ {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+ {%- if delim_str in level_cpld %}
+ {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+ {%- else %}
+ {%- set level_fcst = level_cpld %}
+ {%- set level_obs = level_cpld %}
+ {%- endif %}
+ {%- set tmp = valid_levels_fcst.append(level_fcst) %}
+ {%- set tmp = valid_levels_obs.append(level_obs) %}
+ {%- endfor %}
+
+{#-
+Make sure that the input forecast level (input_level_fcst) is set to a
+valid value.
+#}
+ {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %}
+ {%- set error_msg = '\n' ~
+'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~
+'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~
+'for the current forecast field (field_fcst). This is not the case:\n' ~
+' field_fcst = ' ~ field_fcst ~ '\n' ~
+' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~
+' input_level_fcst = ' ~ input_level_fcst ~ '\n' %}
+ {{metplus_macros.print_err_and_quit(error_msg)}}
+ {%- endif %}
+
+{#-
+Add comment depending on whether or not the field is being treated
+probabilistically.
+#}
+ {%- if treat_fcst_as_prob %}
+# FREQ
+# Process as probability
+#
+ {%- else %}
+#
+# Process as scalars for neighborhood methods
+## Note that the number of forecast and obs thresholds must match
+## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;"
+#
+ {%- endif %}
+
+{#-
+Loop over the (coupled) levels and corresponding lists of thresholds.
+Extract from these the level values for forecasts and observations and
+use them to set the forecast and observation field names, levels,
+thresholds, and/or options in the METplus configuration file.
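+(For instance, a coupled threshold entry such as 'gt0.0<delim>gt0.01',
+with '<delim>' again standing in for the delimiter string, splits into
+a forecast threshold of gt0.0 and an observation threshold of gt0.01,
+while an uncoupled entry is used for both; these values are hypothetical.)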
+#} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + + {%- set valid_threshes_fcst = [] %} + {%- set valid_threshes_obs = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- set thresh_obs = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- set tmp = valid_threshes_obs.append(thresh_obs) %} + {%- endfor %} + + {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} + + {%- for thresh_fcst in valid_threshes_fcst %} + + {%- if (input_thresh_fcst == 'all') or (input_thresh_fcst == thresh_fcst) %} +{#- +Increment the METplus variable counter. +#} + {%- set ns.var_count = ns.var_count+1 %} + +{#- +Set forecast field name. Note that this has to exactly match the name +of the field in the input forecast file(s). + +The input forecast files are generated by the MET/METplus GenEnsProd +tool. That tool adds the field's level to the variable names in its +output file to ensure that all variables in the file have distinct names. +For example, if the same field, say APCP, is output at two different +levels, say at A3 and A6 (for APCP, "levels" are really accumulation +periods), there need to be two variables in the output file, and they +obviously can't both be named "APCP", so GenEnsProd names one "APCP_A3" +and the other "APCP_A6". Here, the level is stored in the variable +level_fcst and, below, is included in the name of the forecast field. + +For accumulated fields, the field name in the input forecast file contains +TWO references to the accumulation period. The first is the level of the +forecast field added by GenEnsProd as described above. The second is +another reference to this same level (accumulation period) but added by +the MET/METplus's PcpCombine tool (whose output file is the input into +GenEnsProd). PcpCombine adds this reference to the level (really the +accumulation period) to the field's name for the same reason that +GenEnsProd does, i.e. to ensure that the names of variables in the output +file are distinct. Here, this accumulation period is stored in the +variable accum_hh. Thus, for accumulated fields, below we add both +accum_hh and level_fcst to the field name to get an exact field name +match. +#} + {%- set thresh_fcst_and_or = thresh_fcst|replace("&&", ".and.") %} + {%- set thresh_fcst_and_or = thresh_fcst_and_or|replace("||", ".or.") %} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}}_{{level_fcst}}_ENS_FREQ_{{thresh_fcst_and_or}} + {%- else %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{level_fcst}}_ENS_FREQ_{{thresh_fcst_and_or}} + {%- endif %} + +{#- +Set forecast field level. +#} +FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}} + +{#- +Set forecast field threshold. +Note that since the forecast field being read in is actually a field of +probabilities, we set the forecast threshold to a probabilistic one +(thresh_fcst_prob) and not to the physical threshold (thresh_fcst) in +the dictionary of forecast field names, levels, and thresholds that we +are looping over. 
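+(Hypothetical example: for 3-hourly APCP with thresh_fcst = gt0.0, the
+settings above and below render as
+  FCST_VAR1_NAME = APCP_03_A3_ENS_FREQ_gt0.0
+  FCST_VAR1_THRESH = ==0.1
+where the ==0.1 comes from thresh_fcst_prob, not from gt0.0.)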
+#}
+FCST_VAR{{ns.var_count}}_THRESH = {{thresh_fcst_prob}}
+
+{#-
+Set forecast field options.
+#}
+            {%- set opts_indent_len = 20 %}
+            {%- if (ns.var_count > 9) and (ns.var_count <= 99) %}
+              {%- set opts_indent_len = opts_indent_len + 1 %}
+            {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %}
+              {%- set opts_indent_len = opts_indent_len + 2 %}
+            {%- elif (ns.var_count > 999) %}
+              {%- set opts_indent_len = opts_indent_len + 3 %}
+            {%- endif %}
+            {%- set opts_indent = ' '*opts_indent_len %}
+
+            {%- if not treat_fcst_as_prob %}
+FCST_VAR{{ns.var_count}}_OPTIONS = prob = FALSE;
+            {%- endif %}
+
+{#-
+Set observation field name. Note that this has to exactly match the name
+of the field in the input observation file.
+
+For accumulated fields, the input observation file is generated by MET's
+PcpCombine tool. In that file, the field name consists of the observation
+field name here (field_obs) with the accumulation period appended to it
+(separated by an underscore), so we must do the same here to get an exact
+match.
+
+Note:
+It turns out that for ASNOW, PcpCombine is not run for obs, so we exclude
+that from the "if" clause here (so it goes into the "else"). For uniform
+workflow behavior between APCP and ASNOW, consider running PcpCombine
+for ASNOW observations as well (just as it's run for APCP observations).
+  {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+#}
+            {%- if (input_field_group in ['APCP']) %}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}_{{accum_hh}}
+            {%- else %}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}
+            {%- endif %}
+
+{#-
+Set observation field level.
+#}
+            {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %}
+            {%- set level_obs = valid_levels_obs[indx_level_fcst] %}
+OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}}
+
+{#-
+Set observation field threshold. Note that no observation thresholds
+are included in the METplus configuration file if input_thresh_fcst is
+set to 'none'.
+#}
+            {%- if (input_thresh_fcst != 'none') %}
+{#-
+Set the observation threshold. This is given by the element in the list
+of valid observation thresholds that has the same index as that of the
+current forecast threshold (thresh_fcst) in the list of valid forecast
+thresholds.
+#}
+              {%- set indx_thresh_fcst = valid_threshes_fcst.index(thresh_fcst) %}
+              {%- set thresh_obs = valid_threshes_obs[indx_thresh_fcst] %}
+OBS_VAR{{ns.var_count}}_THRESH = {{thresh_obs}}
+            {%- endif %}
+
+{#-
+Set observation field options.
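+
+As on the forecast side, opts_indent is sized so that continuation lines
+of a multi-line options string line up under the first option. For
+example (taken from the deleted REFC configuration), with ns.var_count
+set to 5 the rendered output looks like:
+
+OBS_VAR5_OPTIONS = censor_thresh = lt-20;
+                   censor_val = -20.0;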
+#} + {%- set opts_indent_len = opts_indent_len - 1 %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if input_field_group == 'APCP' %} + + {%- if field_obs == 'APCP' %} + {%- if not treat_fcst_as_prob %} +OBS_VAR{{ns.var_count}}_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } + {%- endif %} + {%- endif %} + + {%- elif input_field_group == 'ASNOW' %} + + {%- if field_obs == 'ASNOW' %} + {%- if treat_fcst_as_prob %} +OBS_VAR{{ns.var_count}}_OPTIONS = convert(x) = 100.0*x; + {%- else %} +OBS_VAR{{ns.var_count}}_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; +{{opts_indent}}convert(x) = 100.0*x; + {%- endif %} + {%- endif %} + + {%- elif input_field_group == 'REFC' %} + + {%- if field_obs == 'MergedReflectivityQCComposite' %} +OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20; +{{opts_indent}}censor_val = -20.0; +{{opts_indent}}cnt_thresh = [ >15 ]; +{{opts_indent}}cnt_logic = UNION; + {%- if not treat_fcst_as_prob %} +{{opts_indent}}nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } + {%- endif %} + {%- endif %} + + {%- elif input_field_group == 'RETOP' %} + + {%- if field_obs == 'EchoTop18' %} +OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = lt-20.0; +{{opts_indent}}censor_val = -20.0; +{{opts_indent}}cnt_thresh = [ >15 ]; +{{opts_indent}}cnt_logic = UNION; +{{opts_indent}}convert(x) = x * 3280.84 * 0.001; + {%- if not treat_fcst_as_prob %} +{{opts_indent}}nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } + {%- endif %} + {%- endif %} + + {%- endif %} + +{#- +Print out a newline to separate the settings for the current field (both +forecast and observation settings) from those for the next field. +#} + {{- '\n' }} + + {%- endif %} + {%- endfor %} + + {%- endif %} + + {%- endfor %} + {%- endfor %} +{%- endfor %} +# +# Forecast data time window(s). +# +{%- set comment_or_null = '' %} +{%- set obs_window_abs_val = '0' %} +{%- if input_field_group in ['REFC', 'RETOP'] %} + {%- set comment_or_null = '#' %} + {%- set obs_window_abs_val = '300' %} +{%- endif %} +{{comment_or_null}}FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = 0 +{{comment_or_null}}FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = 0 +# +# Observation data time window(s). +# +{#- +Use integers for seconds, but int can be changed to float if there is a +need to go to sub-seconds. +#} +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = {{ 0 - obs_window_abs_val|int }} +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = {{ obs_window_abs_val|int }} + +# MET {{MetplusToolName}} neighborhood values +# See the MET User's Guide {{MetplusToolName}} section for more information +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_FIELD = + +# width value passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = + +# shape value passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_SHAPE = + +# cov thresh list passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_COV_THRESH = >=0.5 + +# Set to true to run {{MetplusToolName}} separately for each field specified +# Set to false to create one run of {{MetplusToolName}} per run time that +# includes all fields specified. +{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False +# +# Set to true if forecast data is probabilistic. 
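+# For this template the forecast input is the ensemble relative-frequency
+# output of GenEnsProd, so FCST_IS_PROB is always set to True here.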
+# +FCST_IS_PROB = True +FCST_PROB_IN_GRIB_PDS = False +# +# Only used if FCST_IS_PROB is true - sets probabilistic threshold +# +FCST_{{METPLUS_TOOL_NAME}}_PROB_THRESH = {{thresh_fcst_prob}} + +{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} + +# Climatology data +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_DAY_INTERVAL = +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_HOUR_INTERVAL = + +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FILE_NAME = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_FIELD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_WIDTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_REGRID_SHAPE = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_MATCH_MONTH = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_DAY_INTERVAL = +#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_HOUR_INTERVAL = + +{%- set comment_or_null = '' %} +{%- if input_field_group in ['APCP', 'ASNOW'] %} + {%- set comment_or_null = '#' %} +{%- endif %} + +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True + +{{METPLUS_TOOL_NAME}}_MASK_GRID = + +# Statistical output types +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = NONE +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = STAT +{%- if input_field_group in ['APCP', 'ASNOW'] %} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = BOTH +{%- elif input_field_group in ['REFC', 'RETOP'] %} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = NONE +{%- endif %} +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTC = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTS = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCNT = STAT +{%- if input_field_group in ['APCP', 'ASNOW'] %} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_GRAD = BOTH +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_DMAP = NONE +{%- elif input_field_group in ['REFC', 'RETOP'] %} +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_GRAD = NONE +{%- endif %} + +# NetCDF matched pairs output file +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_VAR_NAME = +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_LATLON = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_RAW = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DIFF = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO = FALSE 
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_WEIGHT = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_NBRHD = FALSE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_FOURIER = FALSE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_GRADIENT = FALSE
+{{comment_or_null}}{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_APPLY_MASK = FALSE
+
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used in
+# this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Directory in which to write output from {{MetplusToolName}}.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
+#
+# Variable used to specify one or more verification mask files for
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
diff --git a/parm/metplus/GridStat_ensprob_APCP.conf b/parm/metplus/GridStat_ensprob_APCP.conf
deleted file mode 100644
index 3e16de248d..0000000000
--- a/parm/metplus/GridStat_ensprob_APCP.conf
+++ /dev/null
@@ -1,362 +0,0 @@
-# Ensemble probabilistic GridStat METplus Configuration
-
-[config]
-
-# List of applications (tools) to run.
-PROCESS_LIST = GridStat
-
-# time looping - options are INIT, VALID, RETRO, and REALTIME
-# If set to INIT or RETRO:
-# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
-# If set to VALID or REALTIME:
-# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
-LOOP_BY = INIT
-
-# Format of INIT_BEG and INIT_END using % items
-# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
-# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. 
-# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; - -{%- set field_thresholds = [] %} -{%- if accum_hh == '01' %} - {%- set field_thresholds = ['gt0.0', 'ge0.254', 'ge0.508', 'ge2.54'] %} -{%- elif accum_hh == '03' %} - {%- set field_thresholds = ['gt0.0', 'ge0.508', 'ge2.54', 'ge6.350'] %} -{%- elif accum_hh == '06' %} - {%- set field_thresholds = ['gt0.0', 'ge2.54', 'ge6.350', 'ge12.700'] %} -{%- elif accum_hh == '24' %} - {%- set field_thresholds = ['gt0.0', 'ge6.350', 'ge12.700', 'ge25.400'] %} -{%- endif %} -# -# List of forecast and corresponding observation fields to process. -# -# FREQ -# Process as probability -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[0]}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds[0]}} - -FCST_VAR2_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[1]}} -FCST_VAR2_LEVELS = A{{accum_hh}} -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR2_LEVELS = A{{accum_hh}} -OBS_VAR2_THRESH = {{field_thresholds[1]}} - -FCST_VAR3_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[2]}} -FCST_VAR3_LEVELS = A{{accum_hh}} -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR3_LEVELS = A{{accum_hh}} -OBS_VAR3_THRESH = {{field_thresholds[2]}} - -FCST_VAR4_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[3]}} -FCST_VAR4_LEVELS = A{{accum_hh}} -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR4_LEVELS = A{{accum_hh}} -OBS_VAR4_THRESH = {{field_thresholds[3]}} - -# -#Process as scalars for neighborhood methods -## Note that the number of forecast and obs thresholds must match -## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;" -# -FCST_VAR5_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[0]}} -FCST_VAR5_LEVELS = A{{accum_hh}} -FCST_VAR5_THRESH = ==0.1 -FCST_VAR5_OPTIONS = prob = FALSE; -OBS_VAR5_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR5_LEVELS = A{{accum_hh}} -OBS_VAR5_THRESH = {{field_thresholds[0]}} -OBS_VAR5_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR6_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[1]}} -FCST_VAR6_LEVELS = A{{accum_hh}} -FCST_VAR6_THRESH = ==0.1 -FCST_VAR6_OPTIONS = prob = FALSE; -OBS_VAR6_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR6_LEVELS = A{{accum_hh}} -OBS_VAR6_THRESH = {{field_thresholds[1]}} -OBS_VAR6_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR7_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[2]}} -FCST_VAR7_LEVELS = A{{accum_hh}} -FCST_VAR7_THRESH = ==0.1 -FCST_VAR7_OPTIONS = prob = FALSE; -OBS_VAR7_NAME = 
{{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR7_LEVELS = A{{accum_hh}} -OBS_VAR7_THRESH = {{field_thresholds[2]}} -OBS_VAR7_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR8_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[3]}} -FCST_VAR8_LEVELS = A{{accum_hh}} -FCST_VAR8_THRESH = ==0.1 -FCST_VAR8_OPTIONS = prob = FALSE; -OBS_VAR8_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -OBS_VAR8_LEVELS = A{{accum_hh}} -OBS_VAR8_THRESH = {{field_thresholds[3]}} -OBS_VAR8_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -# -# Forecast data time window(s). -# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = NONE -GRID_STAT_OUTPUT_FLAG_CTC = NONE -GRID_STAT_OUTPUT_FLAG_CTS = NONE -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = NONE -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -GRID_STAT_OUTPUT_FLAG_PCT = STAT -GRID_STAT_OUTPUT_FLAG_PSTD = STAT -GRID_STAT_OUTPUT_FLAG_PJC = STAT -GRID_STAT_OUTPUT_FLAG_PRC = STAT -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTS = NONE -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF 
matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensprob_ASNOW.conf b/parm/metplus/GridStat_ensprob_ASNOW.conf deleted file mode 100644 index ecd17f681b..0000000000 --- a/parm/metplus/GridStat_ensprob_ASNOW.conf +++ /dev/null @@ -1,384 +0,0 @@ -# Ensemble probabilistic GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. 
-INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -#GRID_STAT_INTERP_FIELD = BOTH -#GRID_STAT_INTERP_VLD_THRESH = 1.0 -#GRID_STAT_INTERP_SHAPE = SQUARE -#GRID_STAT_INTERP_TYPE_METHOD = NEAREST -#GRID_STAT_INTERP_TYPE_WIDTH = 1 - -#GRID_STAT_GRID_WEIGHT_FLAG = -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; - -{%- set field_thresholds = [] %} -{%- if accum_hh == '06' %} - {%- set field_thresholds = ['gt0.0', 'ge2.54', 'ge5.08', 'ge10.16', 'ge20.32'] %} -{%- elif accum_hh == '24' %} - {%- set field_thresholds = ['gt0.0', 'ge2.54', 'ge5.08', 'ge10.16', 'ge20.32'] %} -{%- endif %} -# -# List of forecast and corresponding observation fields to process. 
-# -# FREQ -# Process as probability -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[0]}} -FCST_VAR1_LEVELS = A{{accum_hh}} -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = {{fieldname_in_obs_input}} -OBS_VAR1_LEVELS = A{{accum_hh}} -OBS_VAR1_THRESH = {{field_thresholds[0]}} -OBS_VAR1_OPTIONS = convert(x) = 100.0*x; - -FCST_VAR2_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[1]}} -FCST_VAR2_LEVELS = A{{accum_hh}} -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = {{fieldname_in_obs_input}} -OBS_VAR2_LEVELS = A{{accum_hh}} -OBS_VAR2_THRESH = {{field_thresholds[1]}} -OBS_VAR2_OPTIONS = convert(x) = 100.0*x; - -FCST_VAR3_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[2]}} -FCST_VAR3_LEVELS = A{{accum_hh}} -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = {{fieldname_in_obs_input}} -OBS_VAR3_LEVELS = A{{accum_hh}} -OBS_VAR3_THRESH = {{field_thresholds[2]}} -OBS_VAR3_OPTIONS = convert(x) = 100.0*x; - -FCST_VAR4_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[3]}} -FCST_VAR4_LEVELS = A{{accum_hh}} -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = {{fieldname_in_obs_input}} -OBS_VAR4_LEVELS = A{{accum_hh}} -OBS_VAR4_THRESH = {{field_thresholds[3]}} -OBS_VAR4_OPTIONS = convert(x) = 100.0*x; - -FCST_VAR5_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[4]}} -FCST_VAR5_LEVELS = A{{accum_hh}} -FCST_VAR5_THRESH = ==0.1 -OBS_VAR5_NAME = {{fieldname_in_obs_input}} -OBS_VAR5_LEVELS = A{{accum_hh}} -OBS_VAR5_THRESH = {{field_thresholds[4]}} -OBS_VAR5_OPTIONS = convert(x) = 100.0*x; - -# -#Process as scalars for neighborhood methods -## Note that the number of forecast and obs thresholds must match -## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;" -# -FCST_VAR6_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[0]}} -FCST_VAR6_LEVELS = A{{accum_hh}} -FCST_VAR6_THRESH = ==0.1 -FCST_VAR6_OPTIONS = prob = FALSE; -OBS_VAR6_NAME = {{fieldname_in_obs_input}} -OBS_VAR6_LEVELS = A{{accum_hh}} -OBS_VAR6_THRESH = {{field_thresholds[0]}} -OBS_VAR6_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; - convert(x) = 100.0*x; - -FCST_VAR7_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[1]}} -FCST_VAR7_LEVELS = A{{accum_hh}} -FCST_VAR7_THRESH = ==0.1 -FCST_VAR7_OPTIONS = prob = FALSE; -OBS_VAR7_NAME = {{fieldname_in_obs_input}} -OBS_VAR7_LEVELS = A{{accum_hh}} -OBS_VAR7_THRESH = {{field_thresholds[1]}} -OBS_VAR7_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; - convert(x) = 100.0*x; - -FCST_VAR8_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[2]}} -FCST_VAR8_LEVELS = A{{accum_hh}} -FCST_VAR8_THRESH = ==0.1 -FCST_VAR8_OPTIONS = prob = FALSE; -OBS_VAR8_NAME = {{fieldname_in_obs_input}} -OBS_VAR8_LEVELS = A{{accum_hh}} -OBS_VAR8_THRESH = {{field_thresholds[2]}} -OBS_VAR8_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; - convert(x) = 100.0*x; - -FCST_VAR9_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[3]}} -FCST_VAR9_LEVELS = A{{accum_hh}} -FCST_VAR9_THRESH = ==0.1 -FCST_VAR9_OPTIONS = prob = FALSE; -OBS_VAR9_NAME = {{fieldname_in_obs_input}} -OBS_VAR9_LEVELS = A{{accum_hh}} -OBS_VAR9_THRESH = 
{{field_thresholds[3]}} -OBS_VAR9_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; - convert(x) = 100.0*x; - -FCST_VAR10_NAME = {{fieldname_in_met_output}}_{{accum_hh}}_A{{accum_no_pad}}_ENS_FREQ_{{field_thresholds[4]}} -FCST_VAR10_LEVELS = A{{accum_hh}} -FCST_VAR10_THRESH = ==0.1 -FCST_VAR10_OPTIONS = prob = FALSE; -OBS_VAR10_NAME = {{fieldname_in_obs_input}} -OBS_VAR10_LEVELS = A{{accum_hh}} -OBS_VAR10_THRESH = {{field_thresholds[4]}} -OBS_VAR10_OPTIONS = nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; }; - convert(x) = 100.0*x; - -# -# Forecast data time window(s). -# -FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). -# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = 0 -OBS_GRID_STAT_FILE_WINDOW_END = 0 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = NONE -GRID_STAT_OUTPUT_FLAG_CTC = NONE -GRID_STAT_OUTPUT_FLAG_CTS = NONE -#GRID_STAT_OUTPUT_FLAG_MCTC = NONE -#GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = NONE -#GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -#GRID_STAT_OUTPUT_FLAG_VCNT = NONE -GRID_STAT_OUTPUT_FLAG_PCT = STAT -GRID_STAT_OUTPUT_FLAG_PSTD = STAT -GRID_STAT_OUTPUT_FLAG_PJC = STAT -GRID_STAT_OUTPUT_FLAG_PRC = STAT -#GRID_STAT_OUTPUT_FLAG_ECLV = BOTH -GRID_STAT_OUTPUT_FLAG_NBRCTC = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTS = NONE -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -#GRID_STAT_OUTPUT_FLAG_GRAD = BOTH -#GRID_STAT_OUTPUT_FLAG_DMAP = NONE - -# NetCDF matched pairs output file 
-#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -#GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -#GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -#GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensprob_REFC.conf b/parm/metplus/GridStat_ensprob_REFC.conf deleted file mode 100644 index 95e19af1ce..0000000000 --- a/parm/metplus/GridStat_ensprob_REFC.conf +++ /dev/null @@ -1,382 +0,0 @@ -# Ensemble probabilistic GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. 
-INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. 
-# -# FREQ -# Process as probability -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge20 -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = MergedReflectivityQCComposite -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = ge20 -OBS_VAR1_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - -FCST_VAR2_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge30 -FCST_VAR2_LEVELS = L0 -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = MergedReflectivityQCComposite -OBS_VAR2_LEVELS = Z500 -OBS_VAR2_THRESH = ge30 -OBS_VAR2_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - -FCST_VAR3_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge40 -FCST_VAR3_LEVELS = L0 -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = MergedReflectivityQCComposite -OBS_VAR3_LEVELS = Z500 -OBS_VAR3_THRESH = ge40 -OBS_VAR3_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - -FCST_VAR4_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge50 -FCST_VAR4_LEVELS = L0 -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = MergedReflectivityQCComposite -OBS_VAR4_LEVELS = Z500 -OBS_VAR4_THRESH = ge50 -OBS_VAR4_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - -# -#Process as scalars for neighborhood methods -## Note that the number of forecast and obs thresholds must match -## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;" -# -FCST_VAR5_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge20 -FCST_VAR5_LEVELS = L0 -FCST_VAR5_THRESH = ==0.1 -FCST_VAR5_OPTIONS = prob = FALSE; -OBS_VAR5_NAME = MergedReflectivityQCComposite -OBS_VAR5_LEVELS = Z500 -OBS_VAR5_THRESH = ge20 -OBS_VAR5_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR6_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge30 -FCST_VAR6_LEVELS = L0 -FCST_VAR6_THRESH = ==0.1 -FCST_VAR6_OPTIONS = prob = FALSE; -OBS_VAR6_NAME = MergedReflectivityQCComposite -OBS_VAR6_LEVELS = Z500 -OBS_VAR6_THRESH = ge30 -OBS_VAR6_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR7_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge40 -FCST_VAR7_LEVELS = L0 -FCST_VAR7_THRESH = ==0.1 -FCST_VAR7_OPTIONS = prob = FALSE; -OBS_VAR7_NAME = MergedReflectivityQCComposite -OBS_VAR7_LEVELS = Z500 -OBS_VAR7_THRESH = ge40 -OBS_VAR7_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR8_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge50 -FCST_VAR8_LEVELS = L0 -FCST_VAR8_THRESH = ==0.1 -FCST_VAR8_OPTIONS = prob = FALSE; -OBS_VAR8_NAME = MergedReflectivityQCComposite -OBS_VAR8_LEVELS = Z500 -OBS_VAR8_THRESH = ge50 -OBS_VAR8_OPTIONS = censor_thresh = lt-20; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). 
-# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = NONE -GRID_STAT_OUTPUT_FLAG_CTC = NONE -GRID_STAT_OUTPUT_FLAG_CTS = NONE -GRID_STAT_OUTPUT_FLAG_MCTC = NONE -GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = NONE -GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VCNT = NONE -GRID_STAT_OUTPUT_FLAG_PCT = STAT -GRID_STAT_OUTPUT_FLAG_PSTD = STAT -GRID_STAT_OUTPUT_FLAG_PJC = STAT -GRID_STAT_OUTPUT_FLAG_PRC = STAT -GRID_STAT_OUTPUT_FLAG_ECLV = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTC = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTS = NONE -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -GRID_STAT_OUTPUT_FLAG_GRAD = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. 
-# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_ensprob_RETOP.conf b/parm/metplus/GridStat_ensprob_RETOP.conf deleted file mode 100644 index d1f218bea8..0000000000 --- a/parm/metplus/GridStat_ensprob_RETOP.conf +++ /dev/null @@ -1,390 +0,0 @@ -# Ensemble probabilistic GridStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = GridStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. 
-# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {GRID_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to GridStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -GRID_STAT_CONFIG_FILE = {PARM_BASE}/met_config/GridStatConfig_wrapped - -# grid to remap data. Value is set as the 'to_grid' variable in the 'regrid' dictionary -# See MET User's Guide for more information -GRID_STAT_REGRID_TO_GRID = FCST -GRID_STAT_REGRID_VLD_THRESH = 0.5 -GRID_STAT_REGRID_METHOD = BUDGET -GRID_STAT_REGRID_WIDTH = 2 -GRID_STAT_REGRID_SHAPE = SQUARE - -GRID_STAT_INTERP_FIELD = NONE -GRID_STAT_INTERP_VLD_THRESH = 1.0 -GRID_STAT_INTERP_SHAPE = SQUARE -GRID_STAT_INTERP_TYPE_METHOD = NEAREST -GRID_STAT_INTERP_TYPE_WIDTH = 1 - -GRID_STAT_GRID_WEIGHT_FLAG = NONE -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -GRID_STAT_DESC = NA - -# List of variables to compare in GridStat - FCST_VAR1 variables correspond -# to OBS_VAR1 variables -# Note [FCST/OBS/BOTH]_GRID_STAT_VAR_NAME can be used instead if different evaluations -# are needed for different tools - -GRID_STAT_MET_CONFIG_OVERRIDES = cat_thresh = [NA]; cnt_thresh = [NA]; cnt_logic = UNION; wind_thresh = [NA]; wind_logic = UNION; ci_alpha = [0.05]; rank_corr_flag = FALSE; -# -# List of forecast and corresponding observation fields to process. 
-# -# FREQ -# Process as probability -# -FCST_VAR1_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge20 -FCST_VAR1_LEVELS = L0 -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = EchoTop18 -OBS_VAR1_LEVELS = Z500 -OBS_VAR1_THRESH = ge20 -OBS_VAR1_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - -FCST_VAR2_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge30 -FCST_VAR2_LEVELS = L0 -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = EchoTop18 -OBS_VAR2_LEVELS = Z500 -OBS_VAR2_THRESH = ge30 -OBS_VAR2_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - -FCST_VAR3_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge40 -FCST_VAR3_LEVELS = L0 -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = EchoTop18 -OBS_VAR3_LEVELS = Z500 -OBS_VAR3_THRESH = ge40 -OBS_VAR3_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - -FCST_VAR4_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge50 -FCST_VAR4_LEVELS = L0 -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = EchoTop18 -OBS_VAR4_LEVELS = Z500 -OBS_VAR4_THRESH = ge50 -OBS_VAR4_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - -# -#Process as scalars for neighborhood methods -## Note that the number of forecast and obs thresholds must match -## but won't actually be applied to NBRCNT outputs with "nbrhd.field = OBS;" -# -FCST_VAR5_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge20 -FCST_VAR5_LEVELS = L0 -FCST_VAR5_THRESH = ==0.1 -FCST_VAR5_OPTIONS = prob = FALSE; -OBS_VAR5_NAME = EchoTop18 -OBS_VAR5_LEVELS = Z500 -OBS_VAR5_THRESH = ge20 -OBS_VAR5_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR6_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge30 -FCST_VAR6_LEVELS = L0 -FCST_VAR6_THRESH = ==0.1 -FCST_VAR6_OPTIONS = prob = FALSE; -OBS_VAR6_NAME = EchoTop18 -OBS_VAR6_LEVELS = Z500 -OBS_VAR6_THRESH = ge30 -OBS_VAR6_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR7_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge40 -FCST_VAR7_LEVELS = L0 -FCST_VAR7_THRESH = ==0.1 -FCST_VAR7_OPTIONS = prob = FALSE; -OBS_VAR7_NAME = EchoTop18 -OBS_VAR7_LEVELS = Z500 -OBS_VAR7_THRESH = ge40 -OBS_VAR7_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -FCST_VAR8_NAME = {{fieldname_in_met_output}}_L0_ENS_FREQ_ge50 -FCST_VAR8_LEVELS = L0 -FCST_VAR8_THRESH = ==0.1 -FCST_VAR8_OPTIONS = prob = FALSE; -OBS_VAR8_NAME = EchoTop18 -OBS_VAR8_LEVELS = Z500 -OBS_VAR8_THRESH = ge50 -OBS_VAR8_OPTIONS = censor_thresh = lt-20.0; - censor_val = -20.0; - cnt_thresh = [ >15 ]; - cnt_logic = UNION; - convert(x) = x * 3280.84 * 0.001; - nbrhd = { field = OBS; shape = SQUARE; width = [ 1 ]; vld_thresh = 1.0; } - -# -# Forecast data time window(s). -# -#FCST_GRID_STAT_FILE_WINDOW_BEGIN = 0 -#FCST_GRID_STAT_FILE_WINDOW_END = 0 -# -# Observation data time window(s). 
-# -OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300 -OBS_GRID_STAT_FILE_WINDOW_END = 300 - -# MET GridStat neighborhood values -# See the MET User's Guide GridStat section for more information -GRID_STAT_NEIGHBORHOOD_FIELD = - -# width value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_WIDTH = - -# shape value passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_SHAPE = - -# cov thresh list passed to nbrhd dictionary in the MET config file -GRID_STAT_NEIGHBORHOOD_COV_THRESH = >=0.5 - -# Set to true to run GridStat separately for each field specified -# Set to false to create one run of GridStat per run time that -# includes all fields specified. -GRID_STAT_ONCE_PER_FIELD = False -# -# Set to true if forecast data is probabilistic. -# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False -# -# Only used if FCST_IS_PROB is true - sets probabilistic threshold -# -FCST_GRID_STAT_PROB_THRESH = ==0.1 - -GRID_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# Climatology data -#GRID_STAT_CLIMO_MEAN_FILE_NAME = -#GRID_STAT_CLIMO_MEAN_FIELD = -#GRID_STAT_CLIMO_MEAN_REGRID_METHOD = -#GRID_STAT_CLIMO_MEAN_REGRID_WIDTH = -#GRID_STAT_CLIMO_MEAN_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_MEAN_REGRID_SHAPE = -#GRID_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_MEAN_MATCH_MONTH = -#GRID_STAT_CLIMO_MEAN_DAY_INTERVAL = -#GRID_STAT_CLIMO_MEAN_HOUR_INTERVAL = - -#GRID_STAT_CLIMO_STDEV_FILE_NAME = -#GRID_STAT_CLIMO_STDEV_FIELD = -#GRID_STAT_CLIMO_STDEV_REGRID_METHOD = -#GRID_STAT_CLIMO_STDEV_REGRID_WIDTH = -#GRID_STAT_CLIMO_STDEV_REGRID_VLD_THRESH = -#GRID_STAT_CLIMO_STDEV_REGRID_SHAPE = -#GRID_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = -#GRID_STAT_CLIMO_STDEV_MATCH_MONTH = -#GRID_STAT_CLIMO_STDEV_DAY_INTERVAL = -#GRID_STAT_CLIMO_STDEV_HOUR_INTERVAL = - -GRID_STAT_CLIMO_CDF_BINS = 1 -#GRID_STAT_CLIMO_CDF_CENTER_BINS = False -#GRID_STAT_CLIMO_CDF_WRITE_BINS = True - -GRID_STAT_MASK_GRID = - -# Statistical output types -GRID_STAT_OUTPUT_FLAG_FHO = NONE -GRID_STAT_OUTPUT_FLAG_CTC = NONE -GRID_STAT_OUTPUT_FLAG_CTS = NONE -GRID_STAT_OUTPUT_FLAG_MCTC = NONE -GRID_STAT_OUTPUT_FLAG_MCTS = NONE -GRID_STAT_OUTPUT_FLAG_CNT = NONE -GRID_STAT_OUTPUT_FLAG_SL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_SAL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VAL1L2 = NONE -GRID_STAT_OUTPUT_FLAG_VCNT = NONE -GRID_STAT_OUTPUT_FLAG_PCT = STAT -GRID_STAT_OUTPUT_FLAG_PSTD = STAT -GRID_STAT_OUTPUT_FLAG_PJC = STAT -GRID_STAT_OUTPUT_FLAG_PRC = STAT -GRID_STAT_OUTPUT_FLAG_ECLV = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTC = NONE -GRID_STAT_OUTPUT_FLAG_NBRCTS = NONE -GRID_STAT_OUTPUT_FLAG_NBRCNT = STAT -GRID_STAT_OUTPUT_FLAG_GRAD = NONE - -# NetCDF matched pairs output file -#GRID_STAT_NC_PAIRS_VAR_NAME = -GRID_STAT_NC_PAIRS_FLAG_LATLON = FALSE -GRID_STAT_NC_PAIRS_FLAG_RAW = FALSE -GRID_STAT_NC_PAIRS_FLAG_DIFF = FALSE -GRID_STAT_NC_PAIRS_FLAG_CLIMO = FALSE -GRID_STAT_NC_PAIRS_FLAG_WEIGHT = FALSE -GRID_STAT_NC_PAIRS_FLAG_NBRHD = FALSE -GRID_STAT_NC_PAIRS_FLAG_FOURIER = FALSE -GRID_STAT_NC_PAIRS_FLAG_GRADIENT = FALSE -GRID_STAT_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE -GRID_STAT_NC_PAIRS_FLAG_APPLY_MASK = FALSE - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to GridStat. -# -OBS_GRID_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to GridStat. -# -FCST_GRID_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. 
-# -GRID_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to GridStat. Not used in -# this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from GridStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -GRID_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to GridStat relative to -# OBS_GRID_STAT_INPUT_DIR. -# -OBS_GRID_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to GridStat relative to -# FCST_GRID_STAT_INPUT_DIR. -# -FCST_GRID_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from GridStat relative to GRID_STAT_OUTPUT_DIR. -# -GRID_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to GridStat relative to -# GRID_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -GRID_STAT_CLIMO_STDEV_INPUT_TEMPLATE = -# -# Variable used to specify one or more verification mask files for -# GridStat. Not used for this example. -# -GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly diff --git a/parm/metplus/GridStat_or_PointStat.conf b/parm/metplus/GridStat_or_PointStat.conf new file mode 100644 index 0000000000..39d34eb24f --- /dev/null +++ b/parm/metplus/GridStat_or_PointStat.conf @@ -0,0 +1,863 @@ +# {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{fhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. 
+# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. +# +{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped + +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} +# +# Grid to remap data. Value is set as the 'to_grid' variable in the +# 'regrid' dictionary. See MET User's Guide for more information. +# +{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = FCST +{{METPLUS_TOOL_NAME}}_REGRID_VLD_THRESH = 0.5 +{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BUDGET +{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2 +{{METPLUS_TOOL_NAME}}_REGRID_SHAPE = SQUARE +{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %} +# +# Regrid to specified grid. Indicate NONE if no regridding, or the grid id +# (e.g. G212) +# +{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE +{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BILIN +{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2 +{%- endif %} + +{%- if (METPLUS_TOOL_NAME == 'POINT_STAT') %} +# +{{METPLUS_TOOL_NAME}}_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA +#{{METPLUS_TOOL_NAME}}_OBS_QUALITY_EXC = +{%- endif %} + +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} + + {%- if (input_field_group == 'APCP') %} + +#{{METPLUS_TOOL_NAME}}_INTERP_FIELD = BOTH +#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0 +#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE +#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST +#{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1 + +#{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG = + {%- elif input_field_group in ['REFC', 'RETOP'] %} + +{{METPLUS_TOOL_NAME}}_INTERP_FIELD = NONE +{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = 1.0 +{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = SQUARE +{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = NEAREST +{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 1 + +{{METPLUS_TOOL_NAME}}_GRID_WEIGHT_FLAG = NONE + {%- endif %} + +{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %} + +#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH = +#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE = +{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = BILIN +{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 2 + +{%- endif %} +# +# Name to identify model (forecast) data in output. +# +# The variable MODEL is recorded in the stat files, and the data in +# these files is then plotted (e.g. using METViewer). Here, we add a +# suffix to MODEL that identifies the forecast ensemble member. This +# makes it easier to identify each curve. +# +MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} +# +# Name to identify observation data in output. +# +OBTYPE = {{obtype}} +# +# Value to enter under the DESC column in the output stat file. Here, +# we store the value of the original lead in this column, i.e. the lead +# with zero corresponding to the actual start time of the forecast (which +# is (cdate - time_lag)), not to cdate. This is just the lead in +# LEAD_SEQ with the time lag (time_lag) of the current forecast member +# added on. +# +# Uncomment this line only after upgrading to METplus 5.x. 
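+# As an illustrative example (hypothetical values): with a one-hour time
+# lag (time_lag = 3600 seconds) and a nominal lead of 6 hours, the
+# commented-out setting below would record DESC as 070000, i.e. the
+# nominal lead plus the member's time lag.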
+#{{METPLUS_TOOL_NAME}}_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}}
+{{METPLUS_TOOL_NAME}}_DESC = NA
+#
+# Verification Masking regions
+# Indicate which grid and polygon masking region, if applicable
+#
+{{METPLUS_TOOL_NAME}}_MASK_GRID =
+
+{%- if (METPLUS_TOOL_NAME == 'POINT_STAT') %}
+#
+# List of full paths to poly masking files. NOTE: Only short lists of
+# poly files work (those that fit on one line); a long list results in an
+# environment variable that is too long, which causes an error. For long
+# lists of poly masking files (e.g. all the mask files in the NCEP_mask
+# directory), define these in the METplus {{MetplusToolName}} configuration file.
+#
+{{METPLUS_TOOL_NAME}}_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+{{METPLUS_TOOL_NAME}}_STATION_ID =
+
+# Message types: if all message types are to be returned, leave this empty;
+# otherwise, indicate the message types of interest.
+{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
+{%- endif %}
+{%- set overrides_indent_len = 0 %}
+{%- set overrides_indent = '' %}
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+  {%- set overrides_indent_len = 33 %}
+  {%- set overrides_indent = ' '*overrides_indent_len %}
+#
+# Overrides of MET configuration defaults.
+#
+{{METPLUS_TOOL_NAME}}_MET_CONFIG_OVERRIDES = cat_thresh = [NA];
+{{overrides_indent}}cnt_thresh = [NA];
+{{overrides_indent}}cnt_logic = UNION;
+{{overrides_indent}}wind_thresh = [NA];
+{{overrides_indent}}wind_logic = UNION;
+{{overrides_indent}}ci_alpha = [0.05];
+{{overrides_indent}}rank_corr_flag = FALSE;
+{%- endif %}
+#
+# List of forecast and corresponding observation fields to process.
+#
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+  {%- if input_field_group in ['APCP', 'ASNOW'] %}
+# Note that for accumulated fields such as APCP and ASNOW, in the input
+# forecast and observation files (which are generated by MET's PcpCombine
+# tool) the accumulation period is appended to the field name, so the
+# same is done here.
+#
+  {%- endif %}
+{%- endif %}
+# Note on use of set_attr_lead and ensemble member time-lagging:
+# -------------------------------------------------------------
+# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS
+# specifies the lead to use both in naming of the output .stat and .nc
+# files and for setting the lead values contained in those files. This
+# option causes MET/METplus to use the lead values in the variable LEAD_SEQ
+# set above, which are the same for all ensemble forecast members (i.e.
+# regardless of whether members are time lagged with respect to the
+# nominal cycle date specified by cdate). If set_attr_lead were not
+# specified as below, then MET/METplus would get the lead from the input
+# forecast file, and that would in general differ from one ensemble member
+# to the next depending on whether the member is time-lagged. That would
+# cause confusion, so here, we always use lead values with zero lead
+# corresponding to the nominal cdate.
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
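+
+(Illustrative note, not part of the template logic: in Jinja, a name
+assigned with {% set %} inside an inner scope such as a for-loop body is
+not visible outside that scope, which is why the names below are
+pre-defined at the top level; state that must be carried across loop
+iterations, like the field counter used further below, is instead kept
+in a namespace() object -- see ns.var_count.)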
+#}
+{%- set indx_input_level_fcst = '' %}
+{%- set indx_input_thresh_fcst = '' %}
+{%- set error_msg = '' %}
+
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+
+{%- set levels_fcst = '' %}
+{%- set levels_obs = '' %}
+{%- set threshes_cpld = [] %}
+{%- set valid_threshes_fcst = [] %}
+{%- set valid_threshes_obs = [] %}
+{%- set threshes_fcst = [] %}
+{%- set threshes_obs = [] %}
+
+{#-
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
+#}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
+
+{#-
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group. Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
+#}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}
+
+{#-
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in the
+METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %}
+
+  {%- if delim_str in field_cpld %}
+    {%- set field_fcst, field_obs = field_cpld.split(delim_str) %}
+  {%- else %}
+    {%- set field_fcst = field_cpld %}
+    {%- set field_obs = field_cpld %}
+  {%- endif %}
+
+  {%- set levels_cpld = levels_threshes_cpld.keys()|list %}
+  {%- set num_levels = levels_cpld|length %}
+
+{#-
+For convenience, create lists of valid forecast and observation levels
+for the current field.
+#}
+  {%- set valid_levels_fcst = [] %}
+  {%- set valid_levels_obs = [] %}
+  {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %}
+    {%- if delim_str in level_cpld %}
+      {%- set level_fcst, level_obs = level_cpld.split(delim_str) %}
+    {%- else %}
+      {%- set level_fcst = level_cpld %}
+      {%- set level_obs = level_cpld %}
+    {%- endif %}
+    {%- set tmp = valid_levels_fcst.append(level_fcst) %}
+    {%- set tmp = valid_levels_obs.append(level_obs) %}
+  {%- endfor %}
+
+{#-
+Make sure that the input forecast level (input_level_fcst) is set to a
+valid value.
+#}
+  {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %}
+    {%- set error_msg = '\n' ~
+'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~
+'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~
+'for the current forecast field (field_fcst). This is not the case:\n' ~
+'  field_fcst = ' ~ field_fcst ~ '\n' ~
+'  valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~
+'  input_level_fcst = ' ~ input_level_fcst ~ '\n' %}
+    {{metplus_macros.print_err_and_quit(error_msg)}}
+  {%- endif %}
+
+{#-
+Increment the METplus variable counter.
+#}
+  {%- set ns.var_count = ns.var_count+1 %}
+
+{#-
+Set jinja parameters needed in setting the forecast and observation field
+level(s).
+#}
+  {%- if (input_level_fcst == 'all') %}
+    {%- set levels_fcst = valid_levels_fcst %}
+    {%- set levels_obs = valid_levels_obs %}
+{#-
+If input_level_fcst is set to 'all' and there is more than one level to
+be verified for the current field, then the list of forecast thresholds
+for each forecast level must be identical to that of every other level.
+Check for this. Note that this restriction includes the order of the
+thresholds, i.e. the set of thresholds for each level must be in the same
+order as for all other levels. Once this is verified, we can set the
+index of the level to use when obtaining thresholds to that of the first
+(index 0), which will be valid both for the case of num_levels = 1 and
+num_levels > 1.
+#}
+    {%- if (num_levels > 1) %}
+      {{- metplus_macros.check_for_identical_threshes_by_level(
+          field_cpld, levels_threshes_cpld) }}
+    {%- endif %}
+    {%- set indx_input_level_fcst = 0 %}
+{#-
+If input_level_fcst is set to a specific value:
+  1) Ensure that input_level_fcst exists in the list of valid forecast
+     levels.
+  2) Get the index of input_level_fcst in the list of valid forecast
+     levels.
+  3) Use this index to set the forecast and observation levels to one-
+     element lists containing the appropriate level values.
+#}
+  {%- else %}
+
+    {%- if input_level_fcst in valid_levels_fcst %}
+      {%- set indx_input_level_fcst = valid_levels_fcst.index(input_level_fcst) %}
+      {%- set levels_fcst = [valid_levels_fcst[indx_input_level_fcst]] %}
+      {%- set levels_obs = [valid_levels_obs[indx_input_level_fcst]] %}
+    {%- else %}
+      {%- set error_msg = '\n' ~
+'For the current forecast field (field_fcst), the input forecast level\n' ~
+'(input_level_fcst) does not exist in the list of valid forecast levels\n' ~
+'(valid_levels_fcst):\n' ~
+'  field_fcst = ' ~ field_fcst ~ '\n' ~
+'  input_level_fcst = ' ~ input_level_fcst ~ '\n' ~
+'  valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' %}
+      {{metplus_macros.print_err_and_quit(error_msg)}}
+    {%- endif %}
+
+  {%- endif %}
+
+{#-
+Set jinja parameters needed in setting the forecast and observation field
+threshold(s).
+#}
+  {%- if (input_thresh_fcst != 'none') %}
+{#-
+Now set the list of valid forecast thresholds to the one corresponding
+to the first (zeroth) forecast level in the list of forecast levels set
+above. We can do this because, for the case of a single forecast level,
+there is only one list of forecast thresholds to consider (the first
+one), and for the case of all levels, all levels have the same set of
+thresholds (as verified by the check above).
+#}
+    {%- set threshes_cpld = levels_threshes_cpld[levels_cpld[indx_input_level_fcst]] %}
+    {%- set valid_threshes_fcst = [] %}
+    {%- set valid_threshes_obs = [] %}
+    {%- for thresh_cpld in threshes_cpld %}
+      {%- if delim_str in thresh_cpld %}
+        {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %}
+      {%- else %}
+        {%- set thresh_fcst = thresh_cpld %}
+        {%- set thresh_obs = thresh_cpld %}
+      {%- endif %}
+      {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %}
+      {%- set tmp = valid_threshes_obs.append(thresh_obs) %}
+    {%- endfor %}
+{#-
+If input_thresh_fcst is set to 'all', set the list of forecast and
+observation thresholds to the full set of valid values.
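+
+(Illustrative, with hypothetical values: if valid_threshes_fcst is
+['ge20', 'ge30', 'ge40'], then input_thresh_fcst = 'all' keeps the full
+list, while input_thresh_fcst = 'ge30' selects the one-element list
+['ge30'] along with its observation counterpart.)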
+#}
+    {%- if (input_thresh_fcst == 'all') %}
+
+      {%- set threshes_fcst = valid_threshes_fcst %}
+      {%- set threshes_obs = valid_threshes_obs %}
+{#-
+If input_thresh_fcst is set to a specific value:
+  1) Ensure that input_thresh_fcst exists in the list of valid forecast
+     thresholds.
+  2) Get the index of input_thresh_fcst in the list of valid forecast
+     thresholds.
+  3) Use this index to set the forecast and observation thresholds to one-
+     element lists containing the appropriate threshold values.
+#}
+    {%- else %}
+
+      {%- if input_thresh_fcst in valid_threshes_fcst %}
+        {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
+        {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
+        {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %}
+      {%- else %}
+        {%- set error_msg = '\n' ~
+'For the current forecast field (field_fcst) and list of forecast levels\n' ~
+'(levels_fcst), the input forecast threshold (input_thresh_fcst) does not\n' ~
+'exist in the list of valid forecast thresholds (valid_threshes_fcst):\n' ~
+'  field_fcst = ' ~ field_fcst ~ '\n' ~
+'  levels_fcst = ' ~ levels_fcst ~ '\n' ~
+'  valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~
+'  input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %}
+        {{metplus_macros.print_err_and_quit(error_msg)}}
+      {%- endif %}
+
+    {%- endif %}
+
+  {%- endif %}
+
+{#-
+Set forecast field name. Note that this has to exactly match the name
+of the field in the input forecast file.
+
+For accumulated fields, the input forecast file is generated by MET's
+PcpCombine tool. In that file, the field name consists of the forecast
+field name here (field_fcst) with the accumulation period appended to
+it (separated by an underscore), so we must do the same here to get an
+exact match.
+#}
+  {%- if (input_field_group in ['APCP', 'ASNOW']) %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{accum_hh}}
+  {%- else %}
+FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}
+  {%- endif %}
+
+{#-
+Set forecast field level(s).
+#}
+FCST_VAR{{ns.var_count}}_LEVELS = {{levels_fcst|join(', ')}}
+
+{#-
+Set forecast field threshold(s). Note that:
+1) No forecast thresholds are included in the METplus configuration file
+   if input_thresh_fcst is set to 'none'.
+2) If threshes_fcst has been reset to something other than its default
+   value of an empty list, then set the forecast thresholds in the METplus
+   configuration file because that implies threshes_fcst was set above to
+   a non-empty value. Then reset threshes_fcst to its default value for
+   proper processing of thresholds for the next field.
+#}
+  {%- if (input_thresh_fcst != 'none') %}
+    {%- if (threshes_fcst != []) %}
+FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}}
+    {%- endif %}
+    {%- set threshes_fcst = [] %}
+  {%- endif %}
+
+{#-
+Set forecast field options.
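+
+(For reference: the indent length of 20 set just below equals the width
+of the rendered string 'FCST_VAR1_OPTIONS = ', so continuation lines
+align under the first option; one extra space is added for each
+additional digit in the variable counter.)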
+#} +FCST_VAR{{ns.var_count}}_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; + + {%- set opts_indent_len = 20 %} + {%- if (ns.var_count > 9) and (ns.var_count <= 99) %} + {%- set opts_indent_len = opts_indent_len + 1 %} + {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %} + {%- set opts_indent_len = opts_indent_len + 2 %} + {%- elif (ns.var_count > 999) %} + {%- set opts_indent_len = opts_indent_len + 3 %} + {%- endif %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if (input_field_group == 'REFC') %} + + {%- if (field_fcst == 'REFC') %} +{{opts_indent}}cnt_thresh = [ >15 ]; +{{opts_indent}}cnt_logic = UNION; + {%- endif %} + + {%- elif (input_field_group == 'RETOP') %} + + {%- if (field_fcst == 'RETOP') %} +{{opts_indent}}convert(x) = x * 3.28084 * 0.001; +{{opts_indent}}cnt_thresh = [ >0 ]; +{{opts_indent}}cnt_logic = UNION; + {%- endif %} + + {%- elif (input_field_group == 'ADPSFC') %} + + {%- if (field_fcst in ['WIND']) %} +{{opts_indent}}GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. + {%- elif (field_fcst in ['TCDC']) %} +{{opts_indent}}GRIB_lvl_typ = 200; +{{opts_indent}}GRIB2_ipdtmpl_index=[27]; +{{opts_indent}}GRIB2_ipdtmpl_val=[255]; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif (field_fcst in ['VIS']) %} +{{opts_indent}}censor_thresh = [>16090]; +{{opts_indent}}censor_val = [16090]; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif (field_fcst in ['HGT']) %} +{{opts_indent}}GRIB_lvl_typ = 215; +{{opts_indent}}desc = "CEILING"; + {%- endif %} + + {%- elif (input_field_group == 'ADPUPA') %} + + {%- if (field_fcst in ['HGT']) %} + {%- if (levels_fcst[0] in ['L0']) %} +{{opts_indent}}GRIB_lvl_typ = 220; + {%- endif %} + {%- elif (field_fcst in ['CAPE']) %} +{{opts_indent}}cnt_thresh = [ >0 ]; + {%- endif %} + + {%- endif %} + +{#- +Set observation field name. Note that this has to exactly match the name +of the field in the input observation file. + +For accumulated fields, the input observation file is generated by MET's +PcpCombine tool. In that file, the field name consists of the observation +field name here (field_obs) with the accumulation period appended to it +(separated by an underscore), so we must do the same here to get an exact +match. + +Note: +Turns out for ASNOW, PcpCombine is not run for obs, so we exclude that +from the "if" clause here (so it goes into the "else"). For workflow +behavior uniformity between APCP and ASNOW, consider running PcpCombine +for ASNOW observations as well (just as it's run for APCP observations). + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +#} + {%- if (input_field_group in ['APCP']) %} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}_{{accum_hh}} + {%- else %} +OBS_VAR{{ns.var_count}}_NAME = {{field_obs}} + {%- endif %} + +{#- +Set observation field level(s). +#} +OBS_VAR{{ns.var_count}}_LEVELS = {{levels_obs|join(', ')}} + +{#- +Set observation field threshold(s). Note that: +1) No observation thresholds are included in the METplus configuration + file if input_thresh_fcst is set to 'none'. +2) If threshes_obs has been reset to something other than its default value + of an empty list, then we set the observation thresholds in the METplus + configuration file because that implies threshes_obs was set above to + a non-empty value. Then reset threshes_obs to its default value for + proper processing of thresholds for the next field. 
+#} + {%- if (input_thresh_fcst != 'none') %} + {%- if (threshes_obs != []) %} +OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}} + {%- endif %} + {%- set threshes_obs = [] %} + {%- endif %} + +{#- +Set observation field options. +#} + {%- set opts_indent_len = opts_indent_len - 1 %} + {%- set opts_indent = ' '*opts_indent_len %} + + {%- if (input_field_group == 'ASNOW') %} + + {%- if (field_obs == 'ASNOW') %} +OBS_VAR{{ns.var_count}}_OPTIONS = convert(x) = 100.0*x; + {%- endif %} + + {%- elif (input_field_group == 'REFC') %} + + {%- if (field_obs == 'MergedReflectivityQCComposite') %} +OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = [eq-999, <-20]; +{{opts_indent}}censor_val = [-9999, -20]; +{{opts_indent}}cnt_thresh = [ >15 ]; +{{opts_indent}}cnt_logic = UNION; + {%- endif %} + + {%- elif (input_field_group == 'RETOP') %} + + {%- if (field_obs in ['EchoTop18']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = convert(x) = x * 3280.84 * 0.001; +{{opts_indent}}censor_thresh = [<=-9.84252,eq-3.28084]; +{{opts_indent}}censor_val = [-9999,-16.4042]; +{{opts_indent}}cnt_thresh = [ >0 ]; +{{opts_indent}}cnt_logic = UNION; + {%- endif %} + + {%- elif (input_field_group == 'ADPSFC') %} + + {%- if (field_obs in ['WIND']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. + {%- elif (field_obs in ['VIS']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = censor_thresh = [>16090]; +{{opts_indent}}censor_val = [16090]; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- elif (field_obs in ['CEILING']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215; +{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; } + {%- endif %} + + {%- elif (input_field_group == 'ADPUPA') %} + + {%- if (field_obs in ['CAPE', 'MLCAPE']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ]; +{{opts_indent}}cnt_logic = UNION; + {%- elif (field_obs in ['PBL']) %} + {%- if (field_fcst in ['HPBL']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE"; + {%- elif (field_fcst in ['HGT']) %} +OBS_VAR{{ns.var_count}}_OPTIONS = desc = "RI"; + {%- endif %} + {%- endif %} + + {%- endif %} + +{#- +Print out a newline to separate the settings for the current field (both +forecast and observation settings) from those for the next field. +#} + {{- '\n' }} + +{%- endfor %} + + + +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} + {%- if (input_field_group in ['APCP', 'ASNOW']) %} +# +# Forecast data time window(s). +# +FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = 0 +FCST_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = 0 + {%- endif %} +{%- endif %} +# +# Observation data time window(s). 
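+# (For example: the file window of -300 to 300 seconds used for REFC and
+# RETOP below allows an observation file time-stamped up to five minutes
+# before or after the forecast valid time to be matched.)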
+# +{%- set obs_window_begin = 0 %} +{%- set obs_window_end = 0 %} +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} + {%- if (input_field_group in ['REFC', 'RETOP']) %} + {%- set obs_window_begin = -300 %} + {%- set obs_window_end = 300 %} + {%- endif %} +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_BEGIN = {{obs_window_begin}} +OBS_{{METPLUS_TOOL_NAME}}_FILE_WINDOW_END = {{obs_window_end}} +{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %} + {%- set obs_window_begin = -1799 %} + {%- set obs_window_end = 1800 %} +OBS_WINDOW_BEGIN = {{obs_window_begin}} +OBS_WINDOW_END = {{obs_window_end}} +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} +OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = {OBS_WINDOW_END} +# +# Optional list of offsets to look for point observation data +# +{{METPLUS_TOOL_NAME}}_OFFSETS = 0 +{%- endif %} + +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} + +# MET {{MetplusToolName}} neighborhood values +# See the MET User's Guide {{MetplusToolName}} section for more information +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_FIELD = BOTH + +# width value passed to nbrhd dictionary in the MET config file +{%- if (input_field_group in ['APCP']) %} +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = 3,5,7 +{%- elif (input_field_group in ['ASNOW']) %} +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = 5 +{%- elif (input_field_group in ['REFC', 'RETOP']) %} +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_WIDTH = 1,3,5,7 +{%- endif %} + +# shape value passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_SHAPE = SQUARE + +# cov thresh list passed to nbrhd dictionary in the MET config file +{{METPLUS_TOOL_NAME}}_NEIGHBORHOOD_COV_THRESH = >=0.5 +{%- endif %} +# +# Set to True to run {{MetplusToolName}} separately for each field specified; +# set to False to run {{MetplusToolName}} once per run time that includes all +# fields specified. 
+# +{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False + +{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} + +# Climatology data +{%- set comment_or_null = '' %} +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} +#{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = + {%- if (input_field_group in ['APCP', 'ASNOW']) %} + {%- set comment_or_null = '#' %} + {%- endif %} + +{{comment_or_null}}{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True +{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %} +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST + +{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1 +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False +#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True +{%- endif %} + +# Statistical output types +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = NONE +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = NONE +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = BOTH +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCTS = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_NBRCNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_GRAD = BOTH +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_DMAP = NONE +{%- elif (METPLUS_TOOL_NAME == 'POINT_STAT') %} +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS = +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = STAT +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 = +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 = +{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = STAT +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECNT = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RPS = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MPR = +#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ORANK = +{%- endif %} + +{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %} + +# NetCDF matched pairs output file +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_VAR_NAME = +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_LATLON = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_RAW = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DIFF = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO = FALSE +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_CLIMO_CDP = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_WEIGHT = FALSE +{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_NBRHD = FALSE +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_FOURIER = FALSE +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_GRADIENT = FALSE +#{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_DISTANCE_MAP = FALSE 
+{{METPLUS_TOOL_NAME}}_NC_PAIRS_FLAG_APPLY_MASK = FALSE
+{%- endif %}
+
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used in
+# this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Directory in which to write output from {{MetplusToolName}}.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE =
+{#-
+Not sure if the following section for ..._VERIFICATION_MASK_TEMPLATE
+is also necessary for PointStat.
+#}
+{%- if (METPLUS_TOOL_NAME == 'GRID_STAT') %}
+#
+# Variable used to specify one or more verification mask files for
+# {{MetplusToolName}}. Not used for this example.
+#
+{{METPLUS_TOOL_NAME}}_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+{%- endif %}
diff --git a/parm/metplus/PcpCombine.conf b/parm/metplus/PcpCombine.conf
new file mode 100644
index 0000000000..3cee69df1d
--- /dev/null
+++ b/parm/metplus/PcpCombine.conf
@@ -0,0 +1,216 @@
+{%- if FCST_OR_OBS == 'FCST' -%}
+# PcpCombine METplus Configuration for Forecasts
+{%- elif FCST_OR_OBS == 'OBS' -%}
+# PcpCombine METplus Configuration for Observations
+{%- endif %}
+
+[config]
+
+# List of applications (tools) to run.
+PROCESS_LIST = PcpCombine
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+# If set to INIT or RETRO:
+#   INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set
+# If set to VALID or REALTIME:
+#   VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END using % items
+# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc.
+# see www.strftime.org for more information
+# %Y%m%d%H expands to YYYYMMDDHH
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run - must match INIT_TIME_FMT
+INIT_BEG = {{cdate}}
+
+# End time for METplus run - must match INIT_TIME_FMT
+INIT_END = {{cdate}}
+
+# Increment between METplus runs (in seconds if no units are specified).
+# Must be >= 60 seconds.
+INIT_INCREMENT = 3600
+
+# List of forecast leads to process for each run time (init or valid)
+# In hours if units are not specified
+# If unset, defaults to 0 (don't loop through forecast leads)
+LEAD_SEQ = {{fhr_list}}
+
+# Order of loops to process data - Options are times, processes
+# Not relevant if only one item is in the PROCESS_LIST
+# times = run all wrappers in the PROCESS_LIST for a single run time, then
+#   increment the run time and run all wrappers again until all times have
+#   been evaluated.
+# processes = run the first wrapper in the PROCESS_LIST for all times
+#   specified, then repeat for the next item in the PROCESS_LIST until all
+#   wrappers have been run
+LOOP_ORDER = times
+#
+# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud.
+#
+LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}}
+#
+# Specify the name of the METplus log file.
+#
+LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}}
+#
+# Specify the location and name of the final METplus conf file.
+#
+METPLUS_CONF = {% raw %}{{% endraw %}{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}}

{%- if FCST_OR_OBS == 'FCST' %}
+#
+# Run PcpCombine on forecast data but not observations (observation input
+# files are not provided).
+#
+OBS_PCP_COMBINE_RUN = False
+FCST_PCP_COMBINE_RUN = True
+{%- elif FCST_OR_OBS == 'OBS' %}
+#
+# Run PcpCombine on observation data but not forecasts (forecast input
+# files are not provided).
+#
+OBS_PCP_COMBINE_RUN = True
+FCST_PCP_COMBINE_RUN = False
+{%- endif %}
+#
+# Mode of PcpCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM).
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_METHOD = ADD

{%- if (FCST_OR_OBS == 'FCST') and (input_field_group == 'ASNOW') %}
+#
+# Specify name of variable for Snowfall Accumulation.
+# NOTE: Currently TSNOWP is used, which is a constant-density estimate of
+# snowfall accumulation. In future RRFS development, a GSL product with
+# variable-density snowfall accumulation is planned for UPP. When that is
+# included and turned on in post, this variable may be changed to ASNOW.
+#
+FCST_PCP_COMBINE_INPUT_NAMES = TSNOWP
+
+FCST_PCP_COMBINE_INPUT_LEVELS = A01
+{%- endif %}
+#
+# Specify how to name the array in the NetCDF file that PcpCombine
+# generates.
+#
+# For accumulation variables (which is the only type of variable that we
+# run PcpCombine on), we add the accumulation period to the variable name
+# because this is how METplus normally sets names. This is because,
+# depending on the settings in the METplus configuration file, it is
+# possible for a single NetCDF output file to contain output for multiple
+# accumulations, so even though the "level" attribute of each accumulation
+# variable in the output file will contain the level (e.g. "A1" or "A3"),
+# the variable names for, say, the 1-hour and 3-hour accumulations would be
+# the same (e.g. both would be "APCP"), which is not allowed and/or would
+# cause overwriting of data. To avoid this, METplus includes the level
+# as part of the variable name, so we do the same here (even though in
+# our case, it is not required because there will only be one variable in
+# the output NetCDF file).
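+#
+# As an illustrative example (hypothetical values): with
+# fieldname_in_met_output = 'APCP' and accum_hh = '03', the template line
+# below would render as
+#
+#   FCST_PCP_COMBINE_OUTPUT_NAME = APCP_03
+#
+# (or the OBS_ equivalent, depending on the value of FCST_OR_OBS).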
+#
+{%- if (input_field_group in ['APCP', 'ASNOW']) %}
+{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}_{{accum_hh}}
+{%- else %}
+{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}
+{%- endif %}
+#
+# Accumulation interval available in the input data.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_ACCUMS = 01
+#
+# Accumulation interval to generate in the output file.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_ACCUM = {{accum_hh}}
+#
+# If the output NetCDF file already exists, specify whether or not to
+# skip the call to PcpCombine.
+#
+# In general, relaunching a task in the SRW App should recreate all the
+# output from that task regardless of whether or not that output already
+# exists. This is the case when running the PcpCombine task on forecasts.
+# Thus, for forecasts, we set the skip flag to False. However, it turns
+# out that when running PcpCombine on observations, it is necessary to
+# skip the call to PcpCombine (i.e. NOT recreate output files) because
+# in the SRW App's workflow, more than one cycle may want to create the
+# same output observation file. This can happen if the forecast periods
+# from two or more forecasts overlap, e.g. forecast 1 starts at 00Z of
+# day 1 and forecast 2 starts at 00Z of day 2, and the forecasts are
+# both 36 hours long, so the last 12 hours of forecast 1 overlap with the
+# first 12 hours of forecast 2. In this case, there will be two workflow
+# tasks that will try to create the observation APCP files for those 12
+# hours, and the files will be named exactly the same (because the output
+# naming convention in this conf file is based on valid times). Thus, in
+# order to avoid (1) duplicating work and (2) having two tasks accidentally
+# trying to write to the same file (which will cause at least one task to
+# fail), when running PcpCombine on observations we want to skip the call
+# if the output observation file(s) (for a given forecast hour) already
+# exist. For this reason, for observations we set the skip flag to True
+# but set it to False for forecasts.
+#
+{%- if FCST_OR_OBS == 'FCST' %}
+# Since this METplus configuration file takes forecast files as inputs,
+# we set this flag to False.
+#
+PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = False
+{%- elif FCST_OR_OBS == 'OBS' %}
+# Since this METplus configuration file takes observation files as inputs,
+# we set this flag to True.
+#
+PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = True
+{%- endif %}

{%- if FCST_OR_OBS == 'FCST' %}
+#
+# Maximum forecast lead to allow when searching for model data to use in
+# PcpCombine. Default is a very large time (4000 years) so setting this
+# to a valid maximum value can speed up execution time of numerous runs.
+#
+FCST_PCP_COMBINE_MAX_FORECAST = 2d
+#
+# Keep initialization time constant.
+#
+FCST_PCP_COMBINE_CONSTANT_INIT = True
+{%- endif %}

{%- if FCST_OR_OBS == 'OBS' %}
+#
+# Name to identify observation data in output.
+#
+OBTYPE = CCPA
+{%- endif %}
+#
+# Specify file type of input data.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_DATATYPE = GRIB
+
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing input files.
+#
+{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_DIR = {{input_dir}}
+#
+# Directory in which to write output from PcpCombine.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+# +OUTPUT_BASE = {{output_base}} +{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_DIR = {{output_dir}} +# +# Directory for staging data. +# +STAGING_DIR = {{staging_dir}} + +# End of [dir] section and start of [filename_templates] section. +[filename_templates] +# +# Input file name template relative to {{FCST_OR_OBS}}_PCP_COMBINE_INPUT_DIR. +# +{{FCST_OR_OBS}}_PCP_COMBINE_INPUT_TEMPLATE = {{input_fn_template}} +# +# Output file name template relative to {{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_DIR. +# +{{FCST_OR_OBS}}_PCP_COMBINE_OUTPUT_TEMPLATE = {{output_fn_template}} diff --git a/parm/metplus/PcpCombine_fcst_APCP.conf b/parm/metplus/PcpCombine_fcst_APCP.conf deleted file mode 100644 index 64fe0b4fcf..0000000000 --- a/parm/metplus/PcpCombine_fcst_APCP.conf +++ /dev/null @@ -1,130 +0,0 @@ -# PcpCombine METplus Configuration for Forecasts - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PcpCombine - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} - -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {FCST_PCP_COMBINE_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Run PcpCombine on forecast data but not observation (observation input -# files are not provided). -# -OBS_PCP_COMBINE_RUN = False -FCST_PCP_COMBINE_RUN = True -# -# Mode of PcpCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM). -# -FCST_PCP_COMBINE_METHOD = ADD -# -# Specify how to name the array in the NetCDF file that PcpCombine -# generates. -# -FCST_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -# -# Accumulation interval available in the forecast input data. -# -FCST_PCP_COMBINE_INPUT_ACCUMS = 01 -# -# Accumulation interval to generate in the output file. -# -FCST_PCP_COMBINE_OUTPUT_ACCUM = {{accum_hh}} -# -# If the "bucket" output NetCDF file already exists, DON'T skip the call -# to PcpCombine. 
-# -# In general, we want to recreate the files when the SRW App workflow -# task that uses this METplus configuration file is relaunched. -# -PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = False - -# Maximum forecast lead to allow when searching for model data to use in -# PcpCombine. Default is a very large time (4000 years) so setting this -# to a valid maximum value can speed up execution time of numerous runs. -FCST_PCP_COMBINE_MAX_FORECAST = 2d - -# Keep initialization time constant. -FCST_PCP_COMBINE_CONSTANT_INIT = True - -FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing forecast input to PcpCombine. -# -FCST_PCP_COMBINE_INPUT_DIR = {{fcst_input_dir}} -# -# Directory in which to write output from PcpCombine. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -FCST_PCP_COMBINE_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for forecast input to PcpCombine relative to -# FCST_PCP_COMBINE_INPUT_DIR. -# -FCST_PCP_COMBINE_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PcpCombine relative to -# FCST_PCP_COMBINE_OUTPUT_DIR. -# -FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {{output_fn_template}} diff --git a/parm/metplus/PcpCombine_fcst_ASNOW.conf b/parm/metplus/PcpCombine_fcst_ASNOW.conf deleted file mode 100644 index 91a6a70abb..0000000000 --- a/parm/metplus/PcpCombine_fcst_ASNOW.conf +++ /dev/null @@ -1,141 +0,0 @@ -# PcpCombine METplus Configuration for Forecasts - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PcpCombine - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} - -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. 
-# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {FCST_PCP_COMBINE_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Run PcpCombine on forecast data but not observation (observation input -# files are not provided). -# -OBS_PCP_COMBINE_RUN = False -FCST_PCP_COMBINE_RUN = True -# -# Mode of PcpCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM). -# -FCST_PCP_COMBINE_METHOD = ADD -# -# Specify name of variable for Snowfall Accumulation. -# NOTE: Currently TSNOWP is used which is a constant-density estimate of snowfall accumulation. -# In future RRFS development, a GSL product with variable-density snowfall accumulation -# is planned for UPP. When that is included and turned on in post, this variable may be changed -# to ASNOW. -# -FCST_PCP_COMBINE_INPUT_NAMES=TSNOWP - -FCST_PCP_COMBINE_INPUT_LEVELS = A01 -# -# Specify how to name the array in the NetCDF file that PcpCombine -# generates. -# -FCST_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -# -# Accumulation interval available in the forecast input data. -# -FCST_PCP_COMBINE_INPUT_ACCUMS = 01 -# -# Accumulation interval to generate in the output file. -# -FCST_PCP_COMBINE_OUTPUT_ACCUM = {{accum_hh}} - -# If the "bucket" output NetCDF file already exists, DON'T skip the call -# to PcpCombine. -# -# In general, we want to recreate the files when the SRW App workflow -# task that uses this METplus configuration file is relaunched. -# -PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = False - -# Maximum forecast lead to allow when searching for model data to use in -# PcpCombine. Default is a very large time (4000 years) so setting this -# to a valid maximum value can speed up execution time of numerous runs. -FCST_PCP_COMBINE_MAX_FORECAST = 2d - -# Keep initialization time constant. -FCST_PCP_COMBINE_CONSTANT_INIT = True - -FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB -#FCST_NATIVE_DATA_TYPE = GRIB - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing forecast input to PcpCombine. -# -FCST_PCP_COMBINE_INPUT_DIR = {{fcst_input_dir}} -# -# Directory in which to write output from PcpCombine. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -FCST_PCP_COMBINE_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for forecast input to PcpCombine relative to -# FCST_PCP_COMBINE_INPUT_DIR. -# -FCST_PCP_COMBINE_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PCPCOMBINE relative to -# FCST_PCP_COMBINE_OUTPUT_DIR. -# -FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {{output_fn_template}} diff --git a/parm/metplus/PcpCombine_obs_APCP.conf b/parm/metplus/PcpCombine_obs_APCP.conf deleted file mode 100644 index cea6809597..0000000000 --- a/parm/metplus/PcpCombine_obs_APCP.conf +++ /dev/null @@ -1,139 +0,0 @@ -# PcpCombine METplus Configuration for Observations - -[config] - -# List of applications (tools) to run. 
-PROCESS_LIST = PcpCombine - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} - -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {OBS_PCP_COMBINE_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Run PcpCombine on observation data but not forecast (forecast input -# files are not provided). -# -OBS_PCP_COMBINE_RUN = True -FCST_PCP_COMBINE_RUN = False -# -# Mode of PcpCombine to use (SUM, ADD, SUBTRACT, DERIVE, or CUSTOM). -# -OBS_PCP_COMBINE_METHOD = ADD -# -# Specify how to name the array in the NetCDF file that PcpCombine -# generates. -# -OBS_PCP_COMBINE_OUTPUT_NAME = {{fieldname_in_met_output}}_{{accum_hh}} -# -# Accumulation interval available in the observation input data. -# -OBS_PCP_COMBINE_INPUT_ACCUMS = 01 -# -# Accumulation interval to generate in the output file. -# -OBS_PCP_COMBINE_OUTPUT_ACCUM = {{accum_hh}} -# -# If the "bucket" output NetCDF file already exists, skip the call to -# PcpCombine. -# -# In general, we want to recreate the files when the SRW App workflow -# task that uses this METplus configuration file is relaunched. In this -# case, however, it is necessary to skip the call to PcpCombine because -# in the SRW App's workflow, more than one cycle may want to create the -# same file. This can happen if the forecast periods from two or more -# forecasts overlap, e.g. forecast 1 starts at 00Z of day one and forecast -# 2 starts at 00Z of day 2, and the forecasts are both 36 hours long, so -# the last 12 hours of forecast 1 overlap with the first 12 hours of -# forecast 2. In this case, there will be two workflow tasks that will -# try to create the observation APCP files for those 12 hours, and the -# files will be named exactly the same (because the output naming convention -# in this conf file uses valid times). 
In order to (1) avoid duplicating -# work and (2) having two tasks accidentally trying to write to the same -# file (which will cause at least one task to fail), we do not call -# PcpCombine if the output file (for a given forecast hour) already -# exists. -# -PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = True -# -# Name to identify observation data in output. -# -OBTYPE = CCPA -OBS_PCP_COMBINE_INPUT_DATA_TYPE = GRIB - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PcpCombine. -# -OBS_PCP_COMBINE_INPUT_DIR = {{obs_input_dir}} -# -# Directory in which to write output from PcpCombine. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -OBS_PCP_COMBINE_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PcpCombine relative to -# OBS_PCP_COMBINE_INPUT_DIR. -# -OBS_PCP_COMBINE_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for output from PcpCombine relative to -# OBS_PCP_COMBINE_OUTPUT_DIR. -# -OBS_PCP_COMBINE_OUTPUT_TEMPLATE = {{output_fn_template}} diff --git a/parm/metplus/PointStat_ADPSFC.conf b/parm/metplus/PointStat_ADPSFC.conf deleted file mode 100644 index 6d94e0bed9..0000000000 --- a/parm/metplus/PointStat_ADPSFC.conf +++ /dev/null @@ -1,378 +0,0 @@ -# PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. 
-# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -POINT_STAT_OUTPUT_FLAG_FHO = STAT -POINT_STAT_OUTPUT_FLAG_CTC = STAT -POINT_STAT_OUTPUT_FLAG_CTS = STAT -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -POINT_STAT_OUTPUT_FLAG_CNT = STAT -POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -POINT_STAT_OUTPUT_FLAG_VCNT = STAT -#POINT_STAT_OUTPUT_FLAG_PCT = -#POINT_STAT_OUTPUT_FLAG_PSTD = -#POINT_STAT_OUTPUT_FLAG_PJC = -#POINT_STAT_OUTPUT_FLAG_PRC = -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. Here, -# we store the value of the original lead in this column, i.e. the lead -# with zero corresponding to the actual start time of the forecast (which -# is (cdate - time_lag)), not to cdate. This is just the lead in -# LEAD_SEQ with the time lag (time_lag) of the current forecast member -# added on. -# -# Uncomment this line only after upgrading to METplus 5.x. -#POINT_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}} -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. 
NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. -POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. -POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. -# -# Note on use of set_attr_lead and ensemble member time-lagging: -# ------------------------------------------------------------- -# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS -# specifies the lead to use both in naming of the output .stat and .nc -# files and for setting the lead values contained in those files. This -# option causes MET/METplus to use the lead values in the variable LEAD_SEQ -# set above, which are the same for all ensemble forecast members (i.e. -# regardless of whether members are time lagged with respect to the -# nominal cycle date specified by cdate). If set_attr_lead were not -# specified as below, then MET/METplus would get the lead from the input -# forecast file, and that would in general differ from one ensemble member -# to the next depending on whether the member is time-lagged. That would -# cause confusion, so here, we always use lead values with zero lead -# corresponding to the nominal cdate. -# -FCST_VAR1_NAME = TMP -FCST_VAR1_LEVELS = Z2 -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = Z2 - -FCST_VAR2_NAME = DPT -FCST_VAR2_LEVELS = Z2 -FCST_VAR2_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR2_NAME = DPT -OBS_VAR2_LEVELS = Z2 - -FCST_VAR3_NAME = RH -FCST_VAR3_LEVELS = Z2 -FCST_VAR3_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR3_NAME = RH -OBS_VAR3_LEVELS = Z2 - -FCST_VAR4_NAME = UGRD -FCST_VAR4_LEVELS = Z10 -FCST_VAR4_THRESH = ge2.572 -FCST_VAR4_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR4_NAME = UGRD -OBS_VAR4_LEVELS = Z10 -OBS_VAR4_THRESH = ge2.572 - -FCST_VAR5_NAME = VGRD -FCST_VAR5_LEVELS = Z10 -FCST_VAR5_THRESH = ge2.572 -FCST_VAR5_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR5_NAME = VGRD -OBS_VAR5_LEVELS = Z10 -OBS_VAR5_THRESH = ge2.572 - -FCST_VAR6_NAME = WIND -FCST_VAR6_LEVELS = Z10 -FCST_VAR6_THRESH = ge2.572, ge2.572&<5.144, ge5.144, ge10.288, ge15.433 -FCST_VAR6_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. -OBS_VAR6_NAME = WIND -OBS_VAR6_LEVELS = Z10 -OBS_VAR6_THRESH = ge2.572, ge2.572&<5.144, ge5.144, ge10.288, ge15.433 -OBS_VAR6_OPTIONS = GRIB2_pdt = 0; ;; Derive instantaneous 10-m wind from U/V components, overriding max 10-m wind. 
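The "Note on use of set_attr_lead" comment above is easier to follow with concrete numbers. A minimal Python sketch of the lead relabeling it describes (illustrative values only, not SRW App code):

    from datetime import datetime, timedelta

    cdate = datetime(2019, 6, 15, 0)       # nominal cycle date (cdate)
    time_lag = timedelta(hours=6)          # this member's time lag
    nominal_lead = timedelta(hours=12)     # an entry of LEAD_SEQ

    actual_init = cdate - time_lag         # the member actually started 6 h earlier
    actual_lead = nominal_lead + time_lag  # lead MET would read from the member's file

    # set_attr_lead = "{lead?fmt=%H%M%S}" relabels the output with nominal_lead
    # (12 h) instead of actual_lead (18 h), so all members share a lead axis
    # that is relative to the nominal cdate.
    print(actual_init, actual_lead // timedelta(hours=1), nominal_lead // timedelta(hours=1))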
- -FCST_VAR7_NAME = PRMSL -FCST_VAR7_LEVELS = Z0 -FCST_VAR7_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR7_NAME = PRMSL -OBS_VAR7_LEVELS = Z0 - -FCST_VAR8_NAME = TCDC -FCST_VAR8_LEVELS = L0 -FCST_VAR8_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - GRIB_lvl_typ = 200; - GRIB2_ipdtmpl_index=[27]; - GRIB2_ipdtmpl_val=[255]; - interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR8_NAME = TCDC -OBS_VAR8_LEVELS = L0 - -FCST_VAR9_NAME = VIS -FCST_VAR9_LEVELS = L0 -FCST_VAR9_THRESH = lt805, lt1609, lt4828, lt8045, ge8045, lt16090 -FCST_VAR9_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - censor_thresh = [>16090]; - censor_val = [16090]; - interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR9_NAME = VIS -OBS_VAR9_LEVELS = L0 -OBS_VAR9_THRESH = lt805, lt1609, lt4828, lt8045, ge8045, lt16090 -OBS_VAR9_OPTIONS = censor_thresh = [>16090]; - censor_val = [16090]; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR10_NAME = GUST -FCST_VAR10_LEVELS = Z0 -FCST_VAR10_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR10_NAME = GUST -OBS_VAR10_LEVELS = Z0 - -FCST_VAR11_NAME = HGT -FCST_VAR11_LEVELS = L0 -FCST_VAR11_THRESH = lt152, lt305, lt914, lt1520, lt3040, ge914 -FCST_VAR11_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - GRIB_lvl_typ = 215; - desc = "CEILING"; -OBS_VAR11_NAME = CEILING -OBS_VAR11_LEVELS = L0 -OBS_VAR11_THRESH = lt152, lt305, lt914, lt1520, lt3040, ge914 -OBS_VAR11_OPTIONS = GRIB_lvl_typ = 215; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR12_NAME = SPFH -FCST_VAR12_LEVELS = Z2 -OBS_VAR12_NAME = SPFH -OBS_VAR12_LEVELS = Z2 - -FCST_VAR13_NAME = CRAIN -FCST_VAR13_LEVELS = L0 -FCST_VAR13_THRESH = ge1.0 -OBS_VAR13_NAME = PRWE -OBS_VAR13_LEVELS = Z0 -OBS_VAR13_THRESH = ge161&&le163 - -FCST_VAR14_NAME = CSNOW -FCST_VAR14_LEVELS = L0 -FCST_VAR14_THRESH = ge1.0 -OBS_VAR14_NAME = PRWE -OBS_VAR14_LEVELS = Z0 -OBS_VAR14_THRESH = ge171&&le173 - -FCST_VAR15_NAME = CFRZR -FCST_VAR15_LEVELS = L0 -FCST_VAR15_THRESH = ge1.0 -OBS_VAR15_NAME = PRWE -OBS_VAR15_LEVELS = Z0 -OBS_VAR15_THRESH = ge164&&le166 - -FCST_VAR16_NAME = CICEP -FCST_VAR16_LEVELS = L0 -FCST_VAR16_THRESH = ge1.0 -OBS_VAR16_NAME = PRWE -OBS_VAR16_LEVELS = Z0 -OBS_VAR16_THRESH = ge174&&le176 - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. -# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. 
-# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ADPUPA.conf b/parm/metplus/PointStat_ADPUPA.conf deleted file mode 100644 index 519767a51e..0000000000 --- a/parm/metplus/PointStat_ADPUPA.conf +++ /dev/null @@ -1,343 +0,0 @@ -# PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. 
-# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -POINT_STAT_OUTPUT_FLAG_FHO = STAT -POINT_STAT_OUTPUT_FLAG_CTC = STAT -POINT_STAT_OUTPUT_FLAG_CTS = STAT -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -POINT_STAT_OUTPUT_FLAG_CNT = STAT -POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -POINT_STAT_OUTPUT_FLAG_VCNT = STAT -#POINT_STAT_OUTPUT_FLAG_PCT = -#POINT_STAT_OUTPUT_FLAG_PSTD = -#POINT_STAT_OUTPUT_FLAG_PJC = -#POINT_STAT_OUTPUT_FLAG_PRC = -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the forecast ensemble member. This -# makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_{{ensmem_name}} -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. Here, -# we store the value of the original lead in this column, i.e. the lead -# with zero corresponding to the actual start time of the forecast (which -# is (cdate - time_lag)), not to cdate. This is just the lead in -# LEAD_SEQ with the time lag (time_lag) of the current forecast member -# added on. -# -# Uncomment this line only after upgrading to METplus 5.x. -#POINT_STAT_DESC = {lead?fmt=%H%M%S?shift={{time_lag}}} -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. 
-POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. -POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. -# -# Note on use of set_attr_lead and ensemble member time-lagging: -# ------------------------------------------------------------- -# The set_attr_lead parameter appearing below in [FCST|OBS]_VAR_OPTIONS -# specifies the lead to use both in naming of the output .stat and .nc -# files and for setting the lead values contained in those files. This -# option causes MET/METplus to use the lead values in the variable LEAD_SEQ -# set above, which are the same for all ensemble forecast members (i.e. -# regardless of whether members are time lagged with respect to the -# nominal cycle date specified by cdate). If set_attr_lead were not -# specified as below, then MET/METplus would get the lead from the input -# forecast file, and that would in general differ from one ensemble member -# to the next depending on whether the member is time-lagged. That would -# cause confusion, so here, we always use lead values with zero lead -# corresponding to the nominal cdate. -# -FCST_VAR1_NAME = TMP -FCST_VAR1_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -FCST_VAR1_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 - -FCST_VAR2_NAME = RH -FCST_VAR2_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250 -FCST_VAR2_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR2_NAME = RH -OBS_VAR2_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250 - -FCST_VAR3_NAME = DPT -FCST_VAR3_LEVELS = P1000, P925, P850, P700, P500, P400, P300 -FCST_VAR3_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR3_NAME = DPT -OBS_VAR3_LEVELS = P1000, P925, P850, P700, P500, P400, P300 - -FCST_VAR4_NAME = UGRD -FCST_VAR4_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -FCST_VAR4_THRESH = ge2.572 -FCST_VAR4_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR4_NAME = UGRD -OBS_VAR4_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -OBS_VAR4_THRESH = ge2.572 - -FCST_VAR5_NAME = VGRD -FCST_VAR5_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -FCST_VAR5_THRESH = ge2.572 -FCST_VAR5_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR5_NAME = VGRD -OBS_VAR5_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -OBS_VAR5_THRESH = ge2.572 - -FCST_VAR6_NAME = WIND -FCST_VAR6_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -FCST_VAR6_THRESH = ge2.572, ge2.572&<5.144, ge5.144, ge10.288, ge15.433, ge20.577, ge25.722 -FCST_VAR6_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR6_NAME = WIND -OBS_VAR6_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -OBS_VAR6_THRESH = ge2.572, ge2.572&<5.144, ge5.144, ge10.288, ge15.433, ge20.577, ge25.722 - -FCST_VAR7_NAME = HGT -FCST_VAR7_LEVELS = P1000, P950, P925, P850, 
P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 -FCST_VAR7_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR7_NAME = HGT -OBS_VAR7_LEVELS = P1000, P950, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10 - -FCST_VAR8_NAME = SPFH -FCST_VAR8_LEVELS = P1000, P850, P700, P500, P400, P300 -FCST_VAR8_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR8_NAME = SPFH -OBS_VAR8_LEVELS = P1000, P850, P700, P500, P400, P300 - -FCST_VAR9_NAME = CAPE -FCST_VAR9_LEVELS = L0 -FCST_VAR9_THRESH = gt500, gt1000, gt1500, gt2000, gt3000, gt4000 -FCST_VAR9_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - cnt_thresh = [ >0 ]; -OBS_VAR9_NAME = CAPE -OBS_VAR9_LEVELS = L0-100000 -OBS_VAR9_THRESH = gt500, gt1000, gt1500, gt2000, gt3000, gt4000 -OBS_VAR9_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR10_NAME = HPBL -FCST_VAR10_LEVELS = Z0 -FCST_VAR10_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; -OBS_VAR10_NAME = PBL -OBS_VAR10_LEVELS = L0 -OBS_VAR10_OPTIONS = desc = "TKE"; - -FCST_VAR11_NAME = HGT -FCST_VAR11_LEVELS = L0 -FCST_VAR11_OPTIONS = set_attr_lead = "{lead?fmt=%H%M%S}"; - GRIB_lvl_typ = 220; -OBS_VAR11_NAME = PBL -OBS_VAR11_LEVELS = L0 -OBS_VAR11_OPTIONS = desc = "RI"; - -FCST_VAR12_NAME = CAPE -FCST_VAR12_LEVELS = L0-90 -FCST_VAR12_THRESH = gt500, gt1000, gt1500, gt2000, gt3000, gt4000 -FCST_VAR12_OPTIONS = cnt_thresh = [ >0 ]; -OBS_VAR12_NAME = MLCAPE -OBS_VAR12_LEVELS = L0 -OBS_VAR12_THRESH = gt500, gt1000, gt1500, gt2000, gt3000, gt4000 -OBS_VAR12_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. -# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. -# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. 
-# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensmean.conf b/parm/metplus/PointStat_ensmean.conf new file mode 100644 index 0000000000..b16a481dbd --- /dev/null +++ b/parm/metplus/PointStat_ensmean.conf @@ -0,0 +1,564 @@ +# Ensemble mean {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{fhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. +# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. 
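Because this new file collapses the per-tool, per-field-group configurations into a single Jinja template, the tool-name placeholders deserve a quick illustration. A minimal Python sketch (not part of the PR, suffix simplified here) of how the METPLUS_CONF construct above is meant to render, assuming the jinja2 package:

    import jinja2

    line = "METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.conf"
    print(jinja2.Template(line).render(METPLUS_TOOL_NAME="POINT_STAT"))
    # -> METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.conf

    # The '{' ~ ... ~ '}' concatenation emits a literal {POINT_STAT_OUTPUT_DIR}
    # token for METplus itself to resolve later; single-brace tokens such as
    # {LOG_DIR} pass through Jinja untouched.

The same rendering turns {{MetplusToolName}} into, e.g., PointStat and {{METPLUS_TOOL_NAME}} into POINT_STAT throughout the file.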
+#
+{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped
+
+{{METPLUS_TOOL_NAME}}_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA
+#{{METPLUS_TOOL_NAME}}_OBS_QUALITY_EXC =
+
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD =
+
+#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH =
+#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE =
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = BILIN
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 2
+
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS =
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 =
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 =
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECNT =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RPS =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MPR =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ORANK =
+
+{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True
+
+#{{METPLUS_TOOL_NAME}}_HSS_EC_VALUE =
+
+#
+# Observation data time window(s).
+#
+OBS_WINDOW_BEGIN = -1799
+OBS_WINDOW_END = 1800
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = {OBS_WINDOW_END}
+
+# Optional list of offsets to look for point observation data
+{{METPLUS_TOOL_NAME}}_OFFSETS = 0
+#
+# Name to identify model (forecast) data in output.
+#
+# The variable MODEL is recorded in the stat files, and the data in
+# these files is then plotted (e.g. using METViewer). Here, we add a
+# suffix to MODEL that identifies the data as that for the ensemble
+# mean. This makes it easier to identify each curve.
+#
+MODEL = {{vx_fcst_model_name}}_ensmean
+#
+# Name to identify observation data in output.
+#
+OBTYPE = {{obtype}}
+#
+# Value to enter under the DESC column in the output stat file.
+#
+{{METPLUS_TOOL_NAME}}_DESC = NA
+
+# Regrid to specified grid. Indicate NONE if no regridding, or the grid id
+# (e.g. G212)
+{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE
+{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BILIN
+{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2
+
+{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
+
+# sets the -obs_valid_beg command line argument (optional)
+# not used for this example
+#{{METPLUS_TOOL_NAME}}_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H}
+
+# sets the -obs_valid_end command line argument (optional)
+# not used for this example
+#{{METPLUS_TOOL_NAME}}_OBS_VALID_END = {valid?fmt=%Y%m%d_%H}
+
+# Verification Masking regions
+# Indicate which grid and polygon masking region, if applicable
+{{METPLUS_TOOL_NAME}}_GRID =
+
+# List of full paths to poly masking files. NOTE: Only short lists of poly
+# files work (those that fit on one line); a long list will result in an
+# environment variable that is too long, resulting in an error. For long
+# lists of poly masking files (i.e. all the mask files in the NCEP_mask
+# directory), define these in the METplus {{MetplusToolName}} configuration file.
+{{METPLUS_TOOL_NAME}}_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+{{METPLUS_TOOL_NAME}}_STATION_ID =
+
+# Message types, if all message types are to be returned, leave this empty,
+# otherwise indicate the message types of interest.
+{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
+
+# set to True to run {{MetplusToolName}} once for each name/level combination
+# set to False to run {{MetplusToolName}} once per run time including all fields
+{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False
+#
+# List of forecast and corresponding observation fields to process.
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Set the probabilistic threshold to be used for the forecast field. If
+necessary, this can be changed to be an input parameter in the calling
+script instead of a hard-coded value as below.
+#}
+{%- set thresh_fcst_prob = '==0.1' %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
+#}
+{%- set indx_level_fcst = '' %}
+{%- set indx_input_thresh_fcst = '' %}
+{%- set error_msg = '' %}
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+{%- set tmp = '' %}
+
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+
+{%- set threshes_fcst = [] %}
+{%- set threshes_obs = [] %}
+
+{#-
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
+#}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
+
+{#-
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group. Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
+#}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}
+
+{#-
+Some fields in the specified field group (input_field_group) may need to
+be excluded from the METplus config file because calculating means for
+them doesn't make sense. List these (for each input_field_group) in the
+following dictionary.
+#}
+{%- set fields_fcst_to_exclude_by_field_group =
+  {'APCP': [],
+   'ASNOW': [],
+   'REFC': [],
+   'RETOP': [],
+   'ADPSFC': ['TCDC', 'VIS', 'HGT'],
+   'ADPUPA': []} %}
+{%- set fields_fcst_to_exclude = fields_fcst_to_exclude_by_field_group[input_field_group] %}
+
+{#-
+Remove from the dictionary fields_levels_threshes_cpld any fields that
+are in the list to be excluded.
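The for-loop just below implements this removal. As an illustration of the coupled-key convention (an editorial sketch in plain Python, kept inside this Jinja comment; the real delimiter comes from metplus_macros.set_delim_str(), and '::' is only an assumed stand-in):

    delim_str = '::'  # assumed stand-in for metplus_macros.set_delim_str()
    # Keys couple forecast and observation names; a bare key means they agree.
    fields_levels_threshes_cpld = {
        'TMP': {'Z2': ['ge268', 'ge273']},
        'HPBL::PBL': {'Z0::L0': ['lt500', 'lt1500']},
        'VIS': {'L0': ['lt805']},
    }
    fields_fcst_to_exclude = ['TCDC', 'VIS', 'HGT']  # the ADPSFC entry above

    for field_cpld in list(fields_levels_threshes_cpld):
        if delim_str in field_cpld:
            field_fcst, field_obs = field_cpld.split(delim_str)
        else:
            field_fcst = field_obs = field_cpld
        if field_fcst in fields_fcst_to_exclude:
            fields_levels_threshes_cpld.pop(field_cpld)  # 'VIS' is dropped here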
+#} +{%- for field_cpld in fields_levels_threshes_cpld.copy() %} + + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} + + {%- if field_fcst in fields_fcst_to_exclude %} + {%- set tmp = fields_levels_threshes_cpld.pop(field_cpld) %} + {%- endif %} + +{%- endfor %} + +{#- +Loop over the fields and set field names, levels, thresholds, and/or +options for each field, both for forecasts and for observations, in the +METplus configuration file. +#} +{%- set ns = namespace(var_count = 0) %} +{%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %} + + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} + +{#- +For convenience, create lists of valid forecast and observation levels +for the current field. +#} + {%- set valid_levels_fcst = [] %} + {%- set valid_levels_obs = [] %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + {%- set tmp = valid_levels_fcst.append(level_fcst) %} + {%- set tmp = valid_levels_obs.append(level_obs) %} + {%- endfor %} + +{#- +Make sure that the input forecast level (input_level_fcst) is set to a +valid value. +#} + {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %} + {%- set error_msg = '\n' ~ +'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~ +'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~ +'for the current forecast field (field_fcst). This is not the case:\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + +{#- +Loop over the (coupled) levels and corresponding lists of thresholds. +Extract from these the level values for forecasts and observations and +use them to set the forecast and observation field names, levels, +thresholds, and/or options in the METplus configuration file. +#} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + + {%- set valid_threshes_fcst = [] %} + {%- set valid_threshes_obs = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- set thresh_obs = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- set tmp = valid_threshes_obs.append(thresh_obs) %} + {%- endfor %} + + {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} +{#- +Increment the METplus variable counter. +#} + {%- set ns.var_count = ns.var_count+1 %} + +{#- +Set forecast field name. +#} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{level_fcst}}_ENS_MEAN + +{#- +Set forecast field level. 
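For context, a full pass through this loop (name, level, and thresholds together) is intended to reproduce the stanzas that the now-deleted per-field-group files spelled out by hand, e.g. for ADPSFC (compare PointStat_ensmean_ADPSFC.conf later in this diff):

    FCST_VAR1_NAME = TMP_Z2_ENS_MEAN
    FCST_VAR1_LEVELS = Z2
    FCST_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303
    OBS_VAR1_NAME = TMP
    OBS_VAR1_LEVELS = Z2
    OBS_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303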
+#}
+FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}}
+
+{#-
+Set forecast field threshold(s). Note that no forecast thresholds are
+included in the METplus configuration file if input_thresh_fcst is set
+to 'none'.
+#}
+  {%- if (input_thresh_fcst != 'none') %}
+{#-
+If input_thresh_fcst is set to 'all', set the list of forecast thresholds
+to the full set of valid values.
+#}
+    {%- if (input_thresh_fcst == 'all') %}
+
+      {%- set threshes_fcst = valid_threshes_fcst %}
+{#-
+If input_thresh_fcst is set to a specific value:
+* If that value is valid, i.e. it exists in the list of valid forecast
+  thresholds, get its index in that list and use it to set the forecast
+  threshold to a one-element list containing that value. Note that the
+  index will be needed later below when setting the observation threshold(s).
+* If the input forecast threshold is not valid, print out an error message
+  and exit.
+#}
+    {%- else %}
+
+      {%- if input_thresh_fcst in valid_threshes_fcst %}
+        {%- set indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst) %}
+        {%- set threshes_fcst = [valid_threshes_fcst[indx_input_thresh_fcst]] %}
+      {%- else %}
+        {%- set error_msg = '\n' ~
+'For the current forecast field (field_fcst) and forecast level (level_fcst),\n' ~
+'the input forecast threshold (input_thresh_fcst) does not exist in the list\n' ~
+'of valid forecast thresholds (valid_threshes_fcst):\n' ~
+'  field_fcst = ' ~ field_fcst ~ '\n' ~
+'  level_fcst = ' ~ level_fcst ~ '\n' ~
+'  valid_threshes_fcst = ' ~ valid_threshes_fcst ~ '\n' ~
+'  input_thresh_fcst = ' ~ input_thresh_fcst ~ '\n' %}
+        {{metplus_macros.print_err_and_quit(error_msg)}}
+      {%- endif %}
+
+    {%- endif %}
+{#-
+If threshes_fcst is now non-empty (i.e. it was set above to a non-empty
+value rather than left at its default of an empty list), write the
+forecast threshold(s) into the METplus configuration file. Then reset
+threshes_fcst to its default (empty) value so that thresholds are
+properly processed for the next field.
+#}
+    {%- if (threshes_fcst != []) %}
+FCST_VAR{{ns.var_count}}_THRESH = {{threshes_fcst|join(', ')}}
+    {%- endif %}
+    {%- set threshes_fcst = [] %}
+
+  {%- endif %}
+
+{#-
+Set forecast field options.
+#}
+  {%- set opts_indent_len = 20 %}
+  {%- if (ns.var_count > 9) and (ns.var_count <= 99) %}
+    {%- set opts_indent_len = opts_indent_len + 1 %}
+  {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %}
+    {%- set opts_indent_len = opts_indent_len + 2 %}
+  {%- elif (ns.var_count > 999) %}
+    {%- set opts_indent_len = opts_indent_len + 3 %}
+  {%- endif %}
+  {%- set opts_indent = ' '*opts_indent_len %}
+
+  {%- if input_field_group == 'ADPUPA' %}
+
+    {%- if field_fcst == 'CAPE' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
+    {%- endif %}
+
+  {%- endif %}
+
+{#-
+Set observation field name.
+#}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}
+
+{#-
+Set observation field level.
+#}
+  {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %}
+  {%- set level_obs = valid_levels_obs[indx_level_fcst] %}
+OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}}
+
+{#-
+Set observation field threshold(s). Note that no observation thresholds
+are included in the METplus configuration file if input_thresh_fcst is
+set to 'none'.
+#}
+  {%- if (input_thresh_fcst != 'none') %}
+{#-
+If input_thresh_fcst is set to 'all', set the list of observation thresholds
+to the full set of valid values.
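The positional pairing used in the specific-threshold branch below can be sketched in plain Python (illustrative values only, kept inside this Jinja comment):

    valid_threshes_fcst = ['ge268', 'ge273', 'ge278']
    valid_threshes_obs = ['ge268', 'ge273', 'ge278']
    input_thresh_fcst = 'ge273'

    # The index found when setting the forecast threshold above is reused here,
    # so forecast and observation thresholds stay paired by position.
    indx_input_thresh_fcst = valid_threshes_fcst.index(input_thresh_fcst)  # 1
    threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]]            # ['ge273']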
+#}
+    {%- if (input_thresh_fcst == 'all') %}
+
+      {%- set threshes_obs = valid_threshes_obs %}
+{#-
+If input_thresh_fcst is set to a specific forecast threshold, then the
+observation threshold is given by the element in the list of valid
+observation thresholds that has the same index as that of input_thresh_fcst
+in the list of valid forecast thresholds.
+#}
+    {%- else %}
+      {%- set threshes_obs = [valid_threshes_obs[indx_input_thresh_fcst]] %}
+    {%- endif %}
+{#-
+If threshes_obs is now non-empty (i.e. it was set above to a non-empty
+value rather than left at its default of an empty list), write the
+observation threshold(s) into the METplus configuration file. Then reset
+threshes_obs to its default (empty) value so that thresholds are properly
+processed for the next field.
+#}
+    {%- if (threshes_obs != []) %}
+OBS_VAR{{ns.var_count}}_THRESH = {{threshes_obs|join(', ')}}
+    {%- endif %}
+    {%- set threshes_obs = [] %}
+
+  {%- endif %}
+
+{#-
+Set observation field options.
+#}
+  {%- set opts_indent_len = opts_indent_len - 1 %}
+  {%- set opts_indent = ' '*opts_indent_len %}
+
+  {%- if input_field_group == 'ADPUPA' %}
+
+    {%- if field_obs == 'CAPE' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
+{{opts_indent}}cnt_logic = UNION;
+    {%- elif field_obs == 'PBL' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE";
+    {%- endif %}
+
+  {%- endif %}
+
+{#-
+Print out a newline to separate the settings for the current field (both
+forecast and observation settings) from those for the next field.
+#}
+  {{- '\n' }}
+
+  {%- endif %}
+
+  {%- endfor %}
+{%- endfor %}
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}}
+#
+# Directory containing climatology mean input to {{MetplusToolName}}. Not used in
+# this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR =
+#
+# Directory containing climatology standard deviation input to
+# {{MetplusToolName}}. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR =
+#
+# Directory in which to write output from {{MetplusToolName}}.
+#
+# OUTPUT_BASE apparently has to be set to something; it cannot be left
+# to its default value. But it is not explicitly used elsewhere in this
+# configuration file.
+#
+OUTPUT_BASE = {{output_base}}
+{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}}
+#
+# Directory for staging data.
+#
+STAGING_DIR = {{staging_dir}}
+
+# End of [dir] section and start of [filename_templates] section.
+[filename_templates]
+#
+# Template for observation input to {{MetplusToolName}} relative to
+# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}}
+#
+# Template for forecast input to {{MetplusToolName}} relative to
+# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR.
+#
+FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}}
+#
+# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR.
+#
+{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example.
+#
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE =
+#
+# Template for climatology input to {{MetplusToolName}} relative to
+# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR.
Not used in this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensmean_ADPSFC.conf b/parm/metplus/PointStat_ensmean_ADPSFC.conf deleted file mode 100644 index 6b7e7e9cff..0000000000 --- a/parm/metplus/PointStat_ensmean_ADPSFC.conf +++ /dev/null @@ -1,252 +0,0 @@ -# Ensemble mean PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. 
-# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -#POINT_STAT_OUTPUT_FLAG_FHO = -#POINT_STAT_OUTPUT_FLAG_CTC = -#POINT_STAT_OUTPUT_FLAG_CTS = -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -POINT_STAT_OUTPUT_FLAG_CNT = STAT -POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -POINT_STAT_OUTPUT_FLAG_VCNT = STAT -#POINT_STAT_OUTPUT_FLAG_PCT = -#POINT_STAT_OUTPUT_FLAG_PSTD = -#POINT_STAT_OUTPUT_FLAG_PJC = -#POINT_STAT_OUTPUT_FLAG_PRC = -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. -POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. 
-POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. -# -FCST_VAR1_NAME = TMP_Z2_ENS_MEAN -FCST_VAR1_LEVELS = Z2 -FCST_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = Z2 -OBS_VAR1_THRESH = ge268, ge273, ge278, ge293, ge298, ge303 - -FCST_VAR2_NAME = DPT_Z2_ENS_MEAN -FCST_VAR2_LEVELS = Z2 -FCST_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298 -OBS_VAR2_NAME = DPT -OBS_VAR2_LEVELS = Z2 -OBS_VAR2_THRESH = ge263, ge268, ge273, ge288, ge293, ge298 - -FCST_VAR3_NAME = WIND_Z10_ENS_MEAN -FCST_VAR3_LEVELS = Z10 -FCST_VAR3_THRESH = ge5, ge10, ge15 -OBS_VAR3_NAME = WIND -OBS_VAR3_LEVELS = Z10 -OBS_VAR3_THRESH = ge5, ge10, ge15 - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. -# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. -# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensmean_ADPUPA.conf b/parm/metplus/PointStat_ensmean_ADPUPA.conf deleted file mode 100644 index b54c775b46..0000000000 --- a/parm/metplus/PointStat_ensmean_ADPUPA.conf +++ /dev/null @@ -1,319 +0,0 @@ -# Ensemble mean PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. 
-# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -#POINT_STAT_OUTPUT_FLAG_FHO = -#POINT_STAT_OUTPUT_FLAG_CTC = -#POINT_STAT_OUTPUT_FLAG_CTS = -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -POINT_STAT_OUTPUT_FLAG_CNT = STAT -POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -POINT_STAT_OUTPUT_FLAG_VCNT = STAT -#POINT_STAT_OUTPUT_FLAG_PCT = -#POINT_STAT_OUTPUT_FLAG_PSTD = -#POINT_STAT_OUTPUT_FLAG_PJC = -#POINT_STAT_OUTPUT_FLAG_PRC = -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as that for the ensemble -# mean. This makes it easier to identify each curve. 
-# -MODEL = {{vx_fcst_model_name}}_ensmean -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. -POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. -POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. 
-# -FCST_VAR1_NAME = TMP_P850_ENS_MEAN -FCST_VAR1_LEVELS = P850 -FCST_VAR1_THRESH = ge288, ge293, ge298 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = P850 -OBS_VAR1_THRESH = ge288, ge293, ge298 - -FCST_VAR2_NAME = TMP_P700_ENS_MEAN -FCST_VAR2_LEVELS = P700 -FCST_VAR2_THRESH = ge273, ge278, ge283 -OBS_VAR2_NAME = TMP -OBS_VAR2_LEVELS = P700 -OBS_VAR2_THRESH = ge273, ge278, ge283 - -FCST_VAR3_NAME = TMP_P500_ENS_MEAN -FCST_VAR3_LEVELS = P500 -FCST_VAR3_THRESH = ge258, ge263, ge268 -OBS_VAR3_NAME = TMP -OBS_VAR3_LEVELS = P500 -OBS_VAR3_THRESH = ge258, ge263, ge268 - -FCST_VAR4_NAME = DPT_P850_ENS_MEAN -FCST_VAR4_LEVELS = P850 -FCST_VAR4_THRESH = ge273, ge278, ge283 -OBS_VAR4_NAME = DPT -OBS_VAR4_LEVELS = P850 -OBS_VAR4_THRESH = ge273, ge278, ge283 - -FCST_VAR5_NAME = DPT_P700_ENS_MEAN -FCST_VAR5_LEVELS = P700 -FCST_VAR5_THRESH = ge263, ge286, ge273 -OBS_VAR5_NAME = DPT -OBS_VAR5_LEVELS = P700 -OBS_VAR5_THRESH = ge263, ge286, ge273 - -FCST_VAR6_NAME = WIND_P850_ENS_MEAN -FCST_VAR6_LEVELS = P850 -FCST_VAR6_THRESH = ge5, ge10, ge15 -OBS_VAR6_NAME = WIND -OBS_VAR6_LEVELS = P850 -OBS_VAR6_THRESH = ge5, ge10, ge15 - -FCST_VAR7_NAME = WIND_P700_ENS_MEAN -FCST_VAR7_LEVELS = P700 -FCST_VAR7_THRESH = ge10, ge15, ge20 -OBS_VAR7_NAME = WIND -OBS_VAR7_LEVELS = P700 -OBS_VAR7_THRESH = ge10, ge15, ge20 - -FCST_VAR8_NAME = WIND_P500_ENS_MEAN -FCST_VAR8_LEVELS = P500 -FCST_VAR8_THRESH = ge15, ge21, ge26 -OBS_VAR8_NAME = WIND -OBS_VAR8_LEVELS = P500 -OBS_VAR8_THRESH = ge15, ge21, ge26 - -FCST_VAR9_NAME = WIND_P250_ENS_MEAN -FCST_VAR9_LEVELS = P250 -FCST_VAR9_THRESH = ge26, ge31, ge46, ge62 -OBS_VAR9_NAME = WIND -OBS_VAR9_LEVELS = P250 -OBS_VAR9_THRESH = ge26, ge31, ge46, ge62 - -FCST_VAR10_NAME = HGT_P500_ENS_MEAN -FCST_VAR10_LEVELS = P500 -FCST_VAR10_THRESH = ge5400, ge5600, ge5880 -OBS_VAR10_NAME = HGT -OBS_VAR10_LEVELS = P500 -OBS_VAR10_THRESH = ge5400, ge5600, ge5880 - -FCST_VAR11_NAME = CAPE_L0_ENS_MEAN -FCST_VAR11_LEVELS = L0 -FCST_VAR11_THRESH = le1000, gt1000&<2500, gt2500&<4000, gt2500 -FCST_VAR11_OPTIONS = cnt_thresh = [ >0 ]; -OBS_VAR11_NAME = CAPE -OBS_VAR11_LEVELS = L0-100000 -OBS_VAR11_THRESH = le1000, gt1000&<2500, gt2500&<4000, gt2500 -OBS_VAR11_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR12_NAME = HPBL_Z0_ENS_MEAN -FCST_VAR12_LEVELS = Z0 -FCST_VAR12_THRESH = lt500, lt1500, gt1500 -OBS_VAR12_NAME = PBL -OBS_VAR12_LEVELS = L0 -OBS_VAR12_THRESH = lt500, lt1500, gt1500 -OBS_VAR12_OPTIONS = desc = "TKE"; - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. 
-# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. -# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensprob.conf b/parm/metplus/PointStat_ensprob.conf new file mode 100644 index 0000000000..84b9f3954d --- /dev/null +++ b/parm/metplus/PointStat_ensprob.conf @@ -0,0 +1,502 @@ +# Ensemble probabilistic {{MetplusToolName}} METplus Configuration + +[config] + +# List of applications (tools) to run. +PROCESS_LIST = {{MetplusToolName}} + +# time looping - options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +LOOP_BY = INIT + +# Format of INIT_BEG and INIT_END using % items +# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. +# see www.strftime.org for more information +# %Y%m%d%H expands to YYYYMMDDHH +INIT_TIME_FMT = %Y%m%d%H + +# Start time for METplus run - must match INIT_TIME_FMT +INIT_BEG = {{cdate}} + +# End time for METplus run - must match INIT_TIME_FMT +INIT_END = {{cdate}} + +# Increment between METplus runs (in seconds if no units are specified). +# Must be >= 60 seconds. +INIT_INCREMENT = 3600 + +# List of forecast leads to process for each run time (init or valid) +# In hours if units are not specified +# If unset, defaults to 0 (don't loop through forecast leads) +LEAD_SEQ = {{fhr_list}} +# +# Order of loops to process data - Options are times, processes +# Not relevant if only one item is in the PROCESS_LIST +# times = run all wrappers in the PROCESS_LIST for a single run time, then +# increment the run time and run all wrappers again until all times have +# been evaluated. +# processes = run the first wrapper in the PROCESS_LIST for all times +# specified, then repeat for the next item in the PROCESS_LIST until all +# wrappers have been run +# +LOOP_ORDER = times +# +# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. +# +LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} +# +# Specify the name of the METplus log file. +# +LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} +# +# Specify the location and name of the final METplus conf file. +# +METPLUS_CONF = {{ '{' ~ METPLUS_TOOL_NAME ~ '_OUTPUT_DIR}' }}/metplus_final.{{metplus_config_fn}} +# +# Location of MET configuration file to pass to {{MetplusToolName}}. +# +# References PARM_BASE, which is the location of the parm directory +# corresponding to the ush directory of the run_metplus.py script that +# is called or the value of the environment variable METPLUS_PARM_BASE +# if set. 
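+#
+# As an illustrative example (the path is hypothetical), if PARM_BASE
+# resolves to /path/to/METplus/parm, then the wrapped MET configuration
+# file referenced below is read from
+# /path/to/METplus/parm/met_config/{{MetplusToolName}}Config_wrapped.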
+#
+{{METPLUS_TOOL_NAME}}_CONFIG_FILE = {PARM_BASE}/met_config/{{MetplusToolName}}Config_wrapped
+
+{{METPLUS_TOOL_NAME}}_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA
+#{{METPLUS_TOOL_NAME}}_OBS_QUALITY_EXC =
+
+{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST
+#{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_TIME_INTERP_METHOD =
+
+#{{METPLUS_TOOL_NAME}}_INTERP_VLD_THRESH =
+#{{METPLUS_TOOL_NAME}}_INTERP_SHAPE =
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_METHOD = BILIN
+{{METPLUS_TOOL_NAME}}_INTERP_TYPE_WIDTH = 2
+
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_FHO =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CTS =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTC =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MCTS =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_CNT =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SL1L2 =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_SAL1L2 =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VL1L2 =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VAL1L2 =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_VCNT =
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PCT = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PSTD = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PJC = STAT
+{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_PRC = STAT
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECNT =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_RPS =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ECLV =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_MPR =
+#{{METPLUS_TOOL_NAME}}_OUTPUT_FLAG_ORANK =
+
+{{METPLUS_TOOL_NAME}}_CLIMO_CDF_BINS = 1
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_CENTER_BINS = False
+#{{METPLUS_TOOL_NAME}}_CLIMO_CDF_WRITE_BINS = True
+
+#{{METPLUS_TOOL_NAME}}_HSS_EC_VALUE =
+
+#
+# Observation data time window(s).
+#
+OBS_WINDOW_BEGIN = -1799
+OBS_WINDOW_END = 1800
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
+OBS_{{METPLUS_TOOL_NAME}}_WINDOW_END = {OBS_WINDOW_END}
+
+# Optional list of offsets to look for point observation data
+{{METPLUS_TOOL_NAME}}_OFFSETS = 0
+#
+# Name to identify model (forecast) data in output.
+#
+# The variable MODEL is recorded in the stat files, and the data in
+# these files is then plotted (e.g. using METViewer). Here, we add a
+# suffix to MODEL that identifies the data as ensemble-probabilistic.
+# This makes it easier to identify each curve.
+#
+MODEL = {{vx_fcst_model_name}}_ensprob
+#
+# Name to identify observation data in output.
+#
+OBTYPE = {{obtype}}
+#
+# Value to enter under the DESC column in the output stat file.
+#
+{{METPLUS_TOOL_NAME}}_DESC = NA
+
+# Regrid to specified grid. Indicate NONE if no regridding, or the grid id
+# (e.g. G212)
+{{METPLUS_TOOL_NAME}}_REGRID_TO_GRID = NONE
+{{METPLUS_TOOL_NAME}}_REGRID_METHOD = BILIN
+{{METPLUS_TOOL_NAME}}_REGRID_WIDTH = 2
+
+{{METPLUS_TOOL_NAME}}_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE}
+
+# sets the -obs_valid_beg command line argument (optional)
+# not used for this example
+#{{METPLUS_TOOL_NAME}}_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H}
+
+# sets the -obs_valid_end command line argument (optional)
+# not used for this example
+#{{METPLUS_TOOL_NAME}}_OBS_VALID_END = {valid?fmt=%Y%m%d_%H}
+
+# Verification Masking regions
+# Indicate which grid and polygon masking region, if applicable
+{{METPLUS_TOOL_NAME}}_GRID =
+
+# List of full paths to poly masking files. NOTE: Only short lists of poly
+# files work (those that fit on one line); a long list will result in an
+# environment variable that is too long and cause an error. For long
+# lists of poly masking files (e.g. all the mask files in the NCEP_mask
+# directory), define these in the METplus {{MetplusToolName}} configuration file.
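+# As an illustrative example (EAST.poly here is only a placeholder for a
+# second mask file), a short list such as
+#
+#   {{METPLUS_TOOL_NAME}}_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly, {MET_INSTALL_DIR}/share/met/poly/EAST.poly
+#
+# fits on one line and is safe, whereas listing every file under the
+# NCEP_mask directory would not be.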
+{{METPLUS_TOOL_NAME}}_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+{{METPLUS_TOOL_NAME}}_STATION_ID =
+
+# Message types, if all message types are to be returned, leave this empty,
+# otherwise indicate the message types of interest.
+{{METPLUS_TOOL_NAME}}_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}}
+
+# set to True to run {{MetplusToolName}} once for each name/level combination
+# set to False to run {{MetplusToolName}} once per run time including all fields
+{{METPLUS_TOOL_NAME}}_ONCE_PER_FIELD = False
+#
+# List of forecast and corresponding observation fields to process.
+# Note that the forecast variable name must exactly match the name of a
+# variable in the forecast input file(s).
+#
+{#-
+Import the file containing jinja macros.
+#}
+{%- import metplus_templates_dir ~ '/metplus_macros.jinja' as metplus_macros %}
+
+{#-
+Set the probabilistic threshold to be used for the forecast field. If
+necessary, this can be changed to be an input parameter in the calling
+script instead of a hard-coded value as below.
+#}
+{%- set thresh_fcst_prob = '==0.1' %}
+
+{#-
+Jinja requires certain variables to be defined globally within the template
+before they can be used in if-statements and other scopes (see Jinja scoping
+rules). Define such variables.
+#}
+{%- set indx_level_fcst = '' %}
+{%- set indx_thresh_fcst = '' %}
+{%- set error_msg = '' %}
+{%- set opts_indent = '' %}
+{%- set opts_indent_len = '' %}
+
+{%- set field_fcst = '' %}
+{%- set field_obs = '' %}
+{%- set level_fcst = '' %}
+{%- set level_obs = '' %}
+{%- set thresh_fcst = '' %}
+{%- set thresh_obs = '' %}
+{%- set thresh_fcst_and_or = '' %}
+
+{#-
+Get the set of valid field groups and ensure that the specified input
+field group appears in this list.
+#}
+{%- set valid_field_groups = vx_config_dict.keys()|list %}
+{{- metplus_macros.check_field_group(valid_field_groups, input_field_group) }}
+
+{#-
+Reset the input forecast level so that if it happens to be an accumulation
+(e.g. 'A03'), the leading zeros in front of the hour are stripped out (e.g.
+reset to 'A3').
+#}
+{%- set input_level_fcst = metplus_macros.get_accumulation_no_zero_pad(input_level_fcst) %}
+
+{#-
+Extract from the configuration dictionary the set (which itself is a
+dictionary) of fields, levels, and thresholds corresponding to the input
+field group. Then set the delimiter string that separates forecast and
+observation values in the various items (i.e. dictionary keys and values
+representing field names, levels, and thresholds) in this dictionary.
+#}
+{%- set fields_levels_threshes_cpld = vx_config_dict[input_field_group] %}
+{%- set delim_str = metplus_macros.set_delim_str() %}
+
+{#-
+Loop over the fields and set field names, levels, thresholds, and/or
+options for each field, both for forecasts and for observations, in the
+METplus configuration file.
+#}
+{%- set ns = namespace(var_count = 0) %}
+{#-
+This outer for-loop is included to make this code as similar as possible
+to the one in GridStat_ensprob.conf. There, treat_fcst_as_prob takes on
+both True and False values, although here it only takes on the value
+True (which makes the loop redundant). It is not clear why it doesn't
+need to be set to False. This is being investigated (12/13/2023).
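+
+For reference, one pass through the innermost loop below renders a block
+like the following (shown here for the ADPUPA field TMP at level P850 with
+physical threshold ge288, as listed in vx_config_ens.yaml):
+
+  FCST_VAR1_NAME = TMP_P850_ENS_FREQ_ge288
+  FCST_VAR1_LEVELS = P850
+  FCST_VAR1_THRESH = ==0.1
+  OBS_VAR1_NAME = TMP
+  OBS_VAR1_LEVELS = P850
+  OBS_VAR1_THRESH = ge288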
+#} +{%- for treat_fcst_as_prob in [True] %} + + {%- for field_cpld, levels_threshes_cpld in fields_levels_threshes_cpld.items() %} + + {%- if delim_str in field_cpld %} + {%- set field_fcst, field_obs = field_cpld.split(delim_str) %} + {%- else %} + {%- set field_fcst = field_cpld %} + {%- set field_obs = field_cpld %} + {%- endif %} + +{#- +For convenience, create lists of valid forecast and observation levels +for the current field. +#} + {%- set valid_levels_fcst = [] %} + {%- set valid_levels_obs = [] %} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + {%- set tmp = valid_levels_fcst.append(level_fcst) %} + {%- set tmp = valid_levels_obs.append(level_obs) %} + {%- endfor %} + +{#- +Make sure that the input forecast level (input_level_fcst) is set to a +valid value. +#} + {%- if (input_level_fcst != 'all') and (input_level_fcst not in valid_levels_fcst) %} + {%- set error_msg = '\n' ~ +'The input forecast level (input_level_fcst) must be set either to \'all\'\n' ~ +'or to one of the elements in the list of valid levels (valid_levels_fcst)\n' ~ +'for the current forecast field (field_fcst). This is not the case:\n' ~ +' field_fcst = ' ~ field_fcst ~ '\n' ~ +' valid_levels_fcst = ' ~ valid_levels_fcst ~ '\n' ~ +' input_level_fcst = ' ~ input_level_fcst ~ '\n' %} + {{metplus_macros.print_err_and_quit(error_msg)}} + {%- endif %} + +{#- +Loop over the (coupled) levels and corresponding lists of thresholds. +Extract from these the level values for forecasts and observations and +use them to set the forecast and observation field names, levels, +thresholds, and/or options in the METplus configuration file. +#} + {%- for level_cpld, threshes_cpld in levels_threshes_cpld.items() %} + + {%- if delim_str in level_cpld %} + {%- set level_fcst, level_obs = level_cpld.split(delim_str) %} + {%- else %} + {%- set level_fcst = level_cpld %} + {%- set level_obs = level_cpld %} + {%- endif %} + + {%- set valid_threshes_fcst = [] %} + {%- set valid_threshes_obs = [] %} + {%- for thresh_cpld in threshes_cpld %} + {%- if delim_str in thresh_cpld %} + {%- set thresh_fcst, thresh_obs = thresh_cpld.split(delim_str) %} + {%- else %} + {%- set thresh_fcst = thresh_cpld %} + {%- set thresh_obs = thresh_cpld %} + {%- endif %} + {%- set tmp = valid_threshes_fcst.append(thresh_fcst) %} + {%- set tmp = valid_threshes_obs.append(thresh_obs) %} + {%- endfor %} + + {%- if (input_level_fcst == 'all') or (input_level_fcst == level_fcst) %} + + {%- for thresh_fcst in valid_threshes_fcst %} + + {%- if (input_thresh_fcst == 'all') or (input_thresh_fcst == thresh_fcst) %} +{#- +Increment the METplus variable counter. +#} + {%- set ns.var_count = ns.var_count+1 %} + +{#- +Set forecast field name. +#} + {%- set thresh_fcst_and_or = thresh_fcst|replace("&&", ".and.") %} + {%- set thresh_fcst_and_or = thresh_fcst_and_or|replace("||", ".or.") %} +FCST_VAR{{ns.var_count}}_NAME = {{field_fcst}}_{{level_fcst}}_ENS_FREQ_{{thresh_fcst_and_or}} + +{#- +Set forecast field level. +#} +FCST_VAR{{ns.var_count}}_LEVELS = {{level_fcst}} + +{#- +Set forecast field threshold. 
+Note that since the forecast field being read in is actually a field of
+probabilities, we set the forecast threshold to a probabilistic one
+(thresh_fcst_prob) and not to the physical threshold (thresh_fcst) in
+the dictionary of forecast field names, levels, and thresholds that we
+are looping over.
+#}
+FCST_VAR{{ns.var_count}}_THRESH = {{thresh_fcst_prob}}
+
+{#-
+Set forecast field options.
+#}
+ {%- set opts_indent_len = 20 %}
+ {%- if (ns.var_count > 9) and (ns.var_count <= 99) %}
+ {%- set opts_indent_len = opts_indent_len + 1 %}
+ {%- elif (ns.var_count > 99) and (ns.var_count <= 999) %}
+ {%- set opts_indent_len = opts_indent_len + 2 %}
+ {%- elif (ns.var_count > 999) %}
+ {%- set opts_indent_len = opts_indent_len + 3 %}
+ {%- endif %}
+ {%- set opts_indent = ' '*opts_indent_len %}
+
+ {%- if input_field_group == 'ADPSFC' %}
+
+ {%- if field_fcst == 'HGT' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = desc = "CEILING";
+ {%- elif field_fcst == 'VIS' %}
+FCST_VAR{{ns.var_count}}_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
+ {%- endif %}
+
+ {%- endif %}
+
+{#-
+Set observation field name.
+#}
+OBS_VAR{{ns.var_count}}_NAME = {{field_obs}}
+
+{#-
+Set observation field level.
+#}
+ {%- set indx_level_fcst = valid_levels_fcst.index(level_fcst) %}
+ {%- set level_obs = valid_levels_obs[indx_level_fcst] %}
+OBS_VAR{{ns.var_count}}_LEVELS = {{level_obs}}
+
+{#-
+Set observation field threshold. Note that no observation thresholds
+are included in the METplus configuration file if input_thresh_fcst is
+set to 'none'.
+#}
+ {%- if (input_thresh_fcst != 'none') %}
+{#-
+Set the observation threshold. This is given by the element in the list
+of valid observation thresholds that has the same index as that of the
+current forecast threshold (thresh_fcst) in the list of valid forecast
+thresholds.
+#}
+ {%- set indx_thresh_fcst = valid_threshes_fcst.index(thresh_fcst) %}
+ {%- set thresh_obs = valid_threshes_obs[indx_thresh_fcst] %}
+OBS_VAR{{ns.var_count}}_THRESH = {{thresh_obs}}
+ {%- endif %}
+
+{#-
+Set observation field options.
+#}
+ {%- set opts_indent_len = opts_indent_len - 1 %}
+ {%- set opts_indent = ' '*opts_indent_len %}
+
+ {%- if input_field_group == 'ADPSFC' %}
+
+ {%- if field_obs == 'CEILING' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = GRIB_lvl_typ = 215;
+{{opts_indent}}interp = { type = [ { method = NEAREST; width = 1; } ]; }
+ {%- elif field_obs == 'VIS' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; }
+ {%- endif %}
+
+ {%- elif input_field_group == 'ADPUPA' %}
+
+ {%- if field_obs == 'CAPE' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = cnt_thresh = [ >0 ];
+{{opts_indent}}cnt_logic = UNION;
+ {%- elif field_obs == 'PBL' %}
+OBS_VAR{{ns.var_count}}_OPTIONS = desc = "TKE";
+ {%- endif %}
+
+ {%- endif %}
+
+{#-
+Print out a newline to separate the settings for the current field (both
+forecast and observation settings) from those for the next field.
+#}
+ {{- '\n' }}
+
+ {%- endif %}
+ {%- endfor %}
+
+ {%- endif %}
+
+ {%- endfor %}
+ {%- endfor %}
+{%- endfor %}
+#
+# Forecast data description variables
+#
+FCST_IS_PROB = True
+FCST_PROB_IN_GRIB_PDS = False
+
+# End of [config] section and start of [dir] section.
+[dir]
+#
+# Directory containing observation input to {{MetplusToolName}}.
+#
+OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{obs_input_dir}}
+#
+# Directory containing forecast input to {{MetplusToolName}}.
+# +FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR = {{fcst_input_dir}} +# +# Directory containing climatology mean input to {{MetplusToolName}}. Not used in +# this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR = +# +# Directory containing climatology mean input to {{MetplusToolName}}. Not used in +# this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR = +# +# Directory in which to write output from {{MetplusToolName}}. +# +# OUTPUT_BASE apparently has to be set to something; it cannot be left +# to its default value. But it is not explicitly used elsewhere in this +# configuration file. +# +OUTPUT_BASE = {{output_base}} +{{METPLUS_TOOL_NAME}}_OUTPUT_DIR = {{output_dir}} +# +# Directory for staging data. +# +STAGING_DIR = {{staging_dir}} + +# End of [dir] section and start of [filename_templates] section. +[filename_templates] +# +# Template for observation input to {{MetplusToolName}} relative to +# OBS_{{METPLUS_TOOL_NAME}}_INPUT_DIR. +# +OBS_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{obs_input_fn_template}} +# +# Template for forecast input to {{MetplusToolName}} relative to +# FCST_{{METPLUS_TOOL_NAME}}_INPUT_DIR. +# +FCST_{{METPLUS_TOOL_NAME}}_INPUT_TEMPLATE = {{fcst_input_fn_template}} +# +# Template for output from {{MetplusToolName}} relative to {{METPLUS_TOOL_NAME}}_OUTPUT_DIR. +# +{{METPLUS_TOOL_NAME}}_OUTPUT_TEMPLATE = +# +# Template for climatology input to {{MetplusToolName}} relative to +# {{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_DIR. Not used in this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_MEAN_INPUT_TEMPLATE = +# +# Template for climatology input to {{MetplusToolName}} relative to +# {{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_DIR. Not used in this example. +# +{{METPLUS_TOOL_NAME}}_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensprob_ADPSFC.conf b/parm/metplus/PointStat_ensprob_ADPSFC.conf deleted file mode 100644 index c9333b2c81..0000000000 --- a/parm/metplus/PointStat_ensprob_ADPSFC.conf +++ /dev/null @@ -1,415 +0,0 @@ -# Ensemble probabilistic PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. 
-# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. -# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -#POINT_STAT_OUTPUT_FLAG_FHO = -#POINT_STAT_OUTPUT_FLAG_CTC = -#POINT_STAT_OUTPUT_FLAG_CTS = -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -#POINT_STAT_OUTPUT_FLAG_CNT = -#POINT_STAT_OUTPUT_FLAG_SL1L2 = -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -#POINT_STAT_OUTPUT_FLAG_VL1L2 = -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -#POINT_STAT_OUTPUT_FLAG_VCNT = -POINT_STAT_OUTPUT_FLAG_PCT = STAT -POINT_STAT_OUTPUT_FLAG_PSTD = STAT -POINT_STAT_OUTPUT_FLAG_PJC = STAT -POINT_STAT_OUTPUT_FLAG_PRC = STAT -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. 
G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. -POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. -POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. -# Note that the forecast variable name must exactly match the name of a -# variable in the forecast input file(s). -# -FCST_VAR1_NAME = TMP_Z2_ENS_FREQ_ge268 -FCST_VAR1_LEVELS = Z2 -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = Z2 -OBS_VAR1_THRESH = ge268 - -FCST_VAR2_NAME = TMP_Z2_ENS_FREQ_ge273 -FCST_VAR2_LEVELS = Z2 -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = TMP -OBS_VAR2_LEVELS = Z2 -OBS_VAR2_THRESH = ge273 - -FCST_VAR3_NAME = TMP_Z2_ENS_FREQ_ge278 -FCST_VAR3_LEVELS = Z2 -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = TMP -OBS_VAR3_LEVELS = Z2 -OBS_VAR3_THRESH = ge278 - -FCST_VAR4_NAME = TMP_Z2_ENS_FREQ_ge293 -FCST_VAR4_LEVELS = Z2 -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = TMP -OBS_VAR4_LEVELS = Z2 -OBS_VAR4_THRESH = ge293 - -FCST_VAR5_NAME = TMP_Z2_ENS_FREQ_ge298 -FCST_VAR5_LEVELS = Z2 -FCST_VAR5_THRESH = ==0.1 -OBS_VAR5_NAME = TMP -OBS_VAR5_LEVELS = Z2 -OBS_VAR5_THRESH = ge298 - -FCST_VAR6_NAME = TMP_Z2_ENS_FREQ_ge303 -FCST_VAR6_LEVELS = Z2 -FCST_VAR6_THRESH = ==0.1 -OBS_VAR6_NAME = TMP -OBS_VAR6_LEVELS = Z2 -OBS_VAR6_THRESH = ge303 - -FCST_VAR7_NAME = DPT_Z2_ENS_FREQ_ge263 -FCST_VAR7_LEVELS = Z2 -FCST_VAR7_THRESH = ==0.1 -OBS_VAR7_NAME = DPT -OBS_VAR7_LEVELS = Z2 -OBS_VAR7_THRESH = ge263 - -FCST_VAR8_NAME = DPT_Z2_ENS_FREQ_ge268 -FCST_VAR8_LEVELS = Z2 -FCST_VAR8_THRESH = ==0.1 -OBS_VAR8_NAME = DPT -OBS_VAR8_LEVELS = Z2 -OBS_VAR8_THRESH = ge268 - -FCST_VAR9_NAME = DPT_Z2_ENS_FREQ_ge273 -FCST_VAR9_LEVELS = Z2 -FCST_VAR9_THRESH = ==0.1 -OBS_VAR9_NAME = DPT -OBS_VAR9_LEVELS = Z2 -OBS_VAR9_THRESH = ge273 - -FCST_VAR10_NAME = DPT_Z2_ENS_FREQ_ge288 -FCST_VAR10_LEVELS = Z2 -FCST_VAR10_THRESH = ==0.1 -OBS_VAR10_NAME = DPT -OBS_VAR10_LEVELS = Z2 -OBS_VAR10_THRESH = ge288 - -FCST_VAR11_NAME = DPT_Z2_ENS_FREQ_ge293 -FCST_VAR11_LEVELS = Z2 -FCST_VAR11_THRESH = ==0.1 -OBS_VAR11_NAME = DPT -OBS_VAR11_LEVELS = Z2 -OBS_VAR11_THRESH = ge293 - -FCST_VAR12_NAME = DPT_Z2_ENS_FREQ_ge298 -FCST_VAR12_LEVELS = Z2 -FCST_VAR12_THRESH = ==0.1 -OBS_VAR12_NAME = DPT -OBS_VAR12_LEVELS = Z2 -OBS_VAR12_THRESH = ge298 - -FCST_VAR13_NAME = 
WIND_Z10_ENS_FREQ_ge5 -FCST_VAR13_LEVELS = Z10 -FCST_VAR13_THRESH = ==0.1 -OBS_VAR13_NAME = WIND -OBS_VAR13_LEVELS = Z10 -OBS_VAR13_THRESH = ge5 - -FCST_VAR14_NAME = WIND_Z10_ENS_FREQ_ge10 -FCST_VAR14_LEVELS = Z10 -FCST_VAR14_THRESH = ==0.1 -OBS_VAR14_NAME = WIND -OBS_VAR14_LEVELS = Z10 -OBS_VAR14_THRESH = ge10 - -FCST_VAR15_NAME = WIND_Z10_ENS_FREQ_ge15 -FCST_VAR15_LEVELS = Z10 -FCST_VAR15_THRESH = ==0.1 -OBS_VAR15_NAME = WIND -OBS_VAR15_LEVELS = Z10 -OBS_VAR15_THRESH = ge15 - -FCST_VAR16_NAME = TCDC_L0_ENS_FREQ_lt25 -FCST_VAR16_LEVELS = L0 -FCST_VAR16_THRESH = ==0.1 -OBS_VAR16_NAME = TCDC -OBS_VAR16_LEVELS = L0 -OBS_VAR16_THRESH = lt25 - -FCST_VAR17_NAME = TCDC_L0_ENS_FREQ_gt75 -FCST_VAR17_LEVELS = L0 -FCST_VAR17_THRESH = ==0.1 -OBS_VAR17_NAME = TCDC -OBS_VAR17_LEVELS = L0 -OBS_VAR17_THRESH = gt75 - -FCST_VAR18_NAME = VIS_L0_ENS_FREQ_lt1609 -FCST_VAR18_LEVELS = L0 -FCST_VAR18_THRESH = ==0.1 -FCST_VAR18_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR18_NAME = VIS -OBS_VAR18_LEVELS = L0 -OBS_VAR18_THRESH = lt1609 -OBS_VAR18_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR19_NAME = VIS_L0_ENS_FREQ_lt8045 -FCST_VAR19_LEVELS = L0 -FCST_VAR19_THRESH = ==0.1 -FCST_VAR19_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR19_NAME = VIS -OBS_VAR19_LEVELS = L0 -OBS_VAR19_THRESH = lt8045 -OBS_VAR19_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR20_NAME = VIS_L0_ENS_FREQ_ge8045 -FCST_VAR20_LEVELS = L0 -FCST_VAR20_THRESH = ==0.1 -FCST_VAR20_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } -OBS_VAR20_NAME = VIS -OBS_VAR20_LEVELS = L0 -OBS_VAR20_THRESH = ge8045 -OBS_VAR20_OPTIONS = interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR21_NAME = HGT_L0_ENS_FREQ_lt152 -FCST_VAR21_LEVELS = L0 -FCST_VAR21_THRESH = ==0.1 -FCST_VAR21_OPTIONS = desc = "CEILING"; -OBS_VAR21_NAME = CEILING -OBS_VAR21_LEVELS = L0 -OBS_VAR21_THRESH = lt152 -OBS_VAR21_OPTIONS = GRIB_lvl_typ = 215; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR22_NAME = HGT_L0_ENS_FREQ_lt1520 -FCST_VAR22_LEVELS = L0 -FCST_VAR22_THRESH = ==0.1 -FCST_VAR22_OPTIONS = desc = "CEILING"; -OBS_VAR22_NAME = CEILING -OBS_VAR22_LEVELS = L0 -OBS_VAR22_THRESH = lt1520 -OBS_VAR22_OPTIONS = GRIB_lvl_typ = 215; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -FCST_VAR23_NAME = HGT_L0_ENS_FREQ_ge914 -FCST_VAR23_LEVELS = L0 -FCST_VAR23_THRESH = ==0.1 -FCST_VAR23_OPTIONS = desc = "CEILING"; -OBS_VAR23_NAME = CEILING -OBS_VAR23_LEVELS = L0 -OBS_VAR23_THRESH = ge914 -OBS_VAR23_OPTIONS = GRIB_lvl_typ = 215; - interp = { type = [ { method = NEAREST; width = 1; } ]; } - -# -# Forecast data description variables -# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. 
But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. -# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. -# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE = diff --git a/parm/metplus/PointStat_ensprob_ADPUPA.conf b/parm/metplus/PointStat_ensprob_ADPUPA.conf deleted file mode 100644 index eab0270c69..0000000000 --- a/parm/metplus/PointStat_ensprob_ADPUPA.conf +++ /dev/null @@ -1,523 +0,0 @@ -# Ensemble probabilistic PointStat METplus Configuration - -[config] - -# List of applications (tools) to run. -PROCESS_LIST = PointStat - -# time looping - options are INIT, VALID, RETRO, and REALTIME -# If set to INIT or RETRO: -# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set -# If set to VALID or REALTIME: -# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set -LOOP_BY = INIT - -# Format of INIT_BEG and INIT_END using % items -# %Y = 4 digit year, %m = 2 digit month, %d = 2 digit day, etc. -# see www.strftime.org for more information -# %Y%m%d%H expands to YYYYMMDDHH -INIT_TIME_FMT = %Y%m%d%H - -# Start time for METplus run - must match INIT_TIME_FMT -INIT_BEG = {{cdate}} - -# End time for METplus run - must match INIT_TIME_FMT -INIT_END = {{cdate}} - -# Increment between METplus runs (in seconds if no units are specified). -# Must be >= 60 seconds. -INIT_INCREMENT = 3600 - -# List of forecast leads to process for each run time (init or valid) -# In hours if units are not specified -# If unset, defaults to 0 (don't loop through forecast leads) -LEAD_SEQ = {{fhr_list}} -# -# Order of loops to process data - Options are times, processes -# Not relevant if only one item is in the PROCESS_LIST -# times = run all wrappers in the PROCESS_LIST for a single run time, then -# increment the run time and run all wrappers again until all times have -# been evaluated. -# processes = run the first wrapper in the PROCESS_LIST for all times -# specified, then repeat for the next item in the PROCESS_LIST until all -# wrappers have been run -# -LOOP_ORDER = times -# -# Verbosity of MET logging output. 0 to 5; 0 is quiet, 5 is loud. -# -LOG_{{METPLUS_TOOL_NAME}}_VERBOSITY = {{metplus_verbosity_level}} -# -# Specify the name of the METplus log file. -# -LOG_METPLUS = {LOG_DIR}/{{metplus_log_fn}} -# -# Specify the location and name of the final METplus conf file. -# -METPLUS_CONF = {POINT_STAT_OUTPUT_DIR}/metplus_final.{{metplus_config_fn}} -# -# Location of MET configuration file to pass to PointStat. 
-# -# References PARM_BASE, which is the location of the parm directory -# corresponding to the ush directory of the run_metplus.py script that -# is called or the value of the environment variable METPLUS_PARM_BASE -# if set. -# -POINT_STAT_CONFIG_FILE = {PARM_BASE}/met_config/PointStatConfig_wrapped - -POINT_STAT_OBS_QUALITY_INC = 0, 1, 2, 3, 9, NA -#POINT_STAT_OBS_QUALITY_EXC = - -POINT_STAT_CLIMO_MEAN_TIME_INTERP_METHOD = NEAREST -#POINT_STAT_CLIMO_STDEV_TIME_INTERP_METHOD = - -#POINT_STAT_INTERP_VLD_THRESH = -#POINT_STAT_INTERP_SHAPE = -POINT_STAT_INTERP_TYPE_METHOD = BILIN -POINT_STAT_INTERP_TYPE_WIDTH = 2 - -#POINT_STAT_OUTPUT_FLAG_FHO = -#POINT_STAT_OUTPUT_FLAG_CTC = -#POINT_STAT_OUTPUT_FLAG_CTS = -#POINT_STAT_OUTPUT_FLAG_MCTC = -#POINT_STAT_OUTPUT_FLAG_MCTS = -#POINT_STAT_OUTPUT_FLAG_CNT = -#POINT_STAT_OUTPUT_FLAG_SL1L2 = -#POINT_STAT_OUTPUT_FLAG_SAL1L2 = -#POINT_STAT_OUTPUT_FLAG_VL1L2 = -#POINT_STAT_OUTPUT_FLAG_VAL1L2 = -#POINT_STAT_OUTPUT_FLAG_VCNT = -POINT_STAT_OUTPUT_FLAG_PCT = STAT -POINT_STAT_OUTPUT_FLAG_PSTD = STAT -POINT_STAT_OUTPUT_FLAG_PJC = STAT -POINT_STAT_OUTPUT_FLAG_PRC = STAT -#POINT_STAT_OUTPUT_FLAG_ECNT = -#POINT_STAT_OUTPUT_FLAG_RPS = -#POINT_STAT_OUTPUT_FLAG_ECLV = -#POINT_STAT_OUTPUT_FLAG_MPR = -#POINT_STAT_OUTPUT_FLAG_ORANK = - -POINT_STAT_CLIMO_CDF_BINS = 1 -#POINT_STAT_CLIMO_CDF_CENTER_BINS = False -#POINT_STAT_CLIMO_CDF_WRITE_BINS = True - -#POINT_STAT_HSS_EC_VALUE = - -# -# Observation data time window(s). -# -OBS_WINDOW_BEGIN = -1799 -OBS_WINDOW_END = 1800 -OBS_POINT_STAT_WINDOW_BEGIN = {OBS_WINDOW_BEGIN} -OBS_POINT_STAT_WINDOW_END = {OBS_WINDOW_END} - -# Optional list of offsets to look for point observation data -POINT_STAT_OFFSETS = 0 -# -# Name to identify model (forecast) data in output. -# -# The variable MODEL is recorded in the stat files, and the data in -# these files is then plotted (e.g. using METViewer). Here, we add a -# suffix to MODEL that identifies the data as ensemble-probabilistic. -# This makes it easier to identify each curve. -# -MODEL = {{vx_fcst_model_name}}_ensprob -# -# Name to identify observation data in output. -# -OBTYPE = {{obtype}} -# -# Value to enter under the DESC column in the output stat file. -# -POINT_STAT_DESC = NA - -# Regrid to specified grid. Indicate NONE if no regridding, or the grid id -# (e.g. G212) -POINT_STAT_REGRID_TO_GRID = NONE -POINT_STAT_REGRID_METHOD = BILIN -POINT_STAT_REGRID_WIDTH = 2 - -POINT_STAT_OUTPUT_PREFIX = {MODEL}_{{fieldname_in_met_filedir_names}}_{OBTYPE} - -# sets the -obs_valid_beg command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_BEG = {valid?fmt=%Y%m%d_%H} - -# sets the -obs_valid_end command line argument (optional) -# not used for this example -#POINT_STAT_OBS_VALID_END = {valid?fmt=%Y%m%d_%H} - -# Verification Masking regions -# Indicate which grid and polygon masking region, if applicable -POINT_STAT_GRID = - -# List of full path to poly masking files. NOTE: Only short lists of poly -# files work (those that fit on one line), a long list will result in an -# environment variable that is too long, resulting in an error. For long -# lists of poly masking files (i.e. all the mask files in the NCEP_mask -# directory), define these in the METplus PointStat configuration file. -POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly -POINT_STAT_STATION_ID = - -# Message types, if all message types are to be returned, leave this empty, -# otherwise indicate the message types of interest. 
-POINT_STAT_MESSAGE_TYPE = {{fieldname_in_met_filedir_names}} - -# set to True to run PointStat once for each name/level combination -# set to False to run PointStat once per run time including all fields -POINT_STAT_ONCE_PER_FIELD = False -# -# List of forecast and corresponding observation fields to process. -# Note that the forecast variable name must exactly match the name of a -# variable in the forecast input file(s). -# -FCST_VAR1_NAME = TMP_P850_ENS_FREQ_ge288 -FCST_VAR1_LEVELS = P850 -FCST_VAR1_THRESH = ==0.1 -OBS_VAR1_NAME = TMP -OBS_VAR1_LEVELS = P850 -OBS_VAR1_THRESH = ge288 - -FCST_VAR2_NAME = TMP_P850_ENS_FREQ_ge293 -FCST_VAR2_LEVELS = P850 -FCST_VAR2_THRESH = ==0.1 -OBS_VAR2_NAME = TMP -OBS_VAR2_LEVELS = P850 -OBS_VAR2_THRESH = ge293 - -FCST_VAR3_NAME = TMP_P850_ENS_FREQ_ge298 -FCST_VAR3_LEVELS = P850 -FCST_VAR3_THRESH = ==0.1 -OBS_VAR3_NAME = TMP -OBS_VAR3_LEVELS = P850 -OBS_VAR3_THRESH = ge298 - -FCST_VAR4_NAME = TMP_P700_ENS_FREQ_ge273 -FCST_VAR4_LEVELS = P700 -FCST_VAR4_THRESH = ==0.1 -OBS_VAR4_NAME = TMP -OBS_VAR4_LEVELS = P700 -OBS_VAR4_THRESH = ge273 - -FCST_VAR5_NAME = TMP_P700_ENS_FREQ_ge278 -FCST_VAR5_LEVELS = P700 -FCST_VAR5_THRESH = ==0.1 -OBS_VAR5_NAME = TMP -OBS_VAR5_LEVELS = P700 -OBS_VAR5_THRESH = ge278 - -FCST_VAR6_NAME = TMP_P700_ENS_FREQ_ge283 -FCST_VAR6_LEVELS = P700 -FCST_VAR6_THRESH = ==0.1 -OBS_VAR6_NAME = TMP -OBS_VAR6_LEVELS = P700 -OBS_VAR6_THRESH = ge283 - -FCST_VAR7_NAME = TMP_P500_ENS_FREQ_ge258 -FCST_VAR7_LEVELS = P500 -FCST_VAR7_THRESH = ==0.1 -OBS_VAR7_NAME = TMP -OBS_VAR7_LEVELS = P500 -OBS_VAR7_THRESH = ge258 - -FCST_VAR8_NAME = TMP_P500_ENS_FREQ_ge263 -FCST_VAR8_LEVELS = P500 -FCST_VAR8_THRESH = ==0.1 -OBS_VAR8_NAME = TMP -OBS_VAR8_LEVELS = P500 -OBS_VAR8_THRESH = ge263 - -FCST_VAR9_NAME = TMP_P500_ENS_FREQ_ge268 -FCST_VAR9_LEVELS = P500 -FCST_VAR9_THRESH = ==0.1 -OBS_VAR9_NAME = TMP -OBS_VAR9_LEVELS = P500 -OBS_VAR9_THRESH = ge268 - -FCST_VAR10_NAME = DPT_P850_ENS_FREQ_ge273 -FCST_VAR10_LEVELS = P850 -FCST_VAR10_THRESH = ==0.1 -OBS_VAR10_NAME = DPT -OBS_VAR10_LEVELS = P850 -OBS_VAR10_THRESH = ge273 - -FCST_VAR11_NAME = DPT_P850_ENS_FREQ_ge278 -FCST_VAR11_LEVELS = P850 -FCST_VAR11_THRESH = ==0.1 -OBS_VAR11_NAME = DPT -OBS_VAR11_LEVELS = P850 -OBS_VAR11_THRESH = ge278 - -FCST_VAR12_NAME = DPT_P850_ENS_FREQ_ge283 -FCST_VAR12_LEVELS = P850 -FCST_VAR12_THRESH = ==0.1 -OBS_VAR12_NAME = DPT -OBS_VAR12_LEVELS = P850 -OBS_VAR12_THRESH = ge283 - -FCST_VAR13_NAME = DPT_P700_ENS_FREQ_ge263 -FCST_VAR13_LEVELS = P700 -FCST_VAR13_THRESH = ==0.1 -OBS_VAR13_NAME = DPT -OBS_VAR13_LEVELS = P700 -OBS_VAR13_THRESH = ge263 - -FCST_VAR14_NAME = DPT_P700_ENS_FREQ_ge268 -FCST_VAR14_LEVELS = P700 -FCST_VAR14_THRESH = ==0.1 -OBS_VAR14_NAME = DPT -OBS_VAR14_LEVELS = P700 -OBS_VAR14_THRESH = ge268 - -FCST_VAR15_NAME = DPT_P700_ENS_FREQ_ge273 -FCST_VAR15_LEVELS = P700 -FCST_VAR15_THRESH = ==0.1 -OBS_VAR15_NAME = DPT -OBS_VAR15_LEVELS = P700 -OBS_VAR15_THRESH = ge273 - -FCST_VAR16_NAME = WIND_P850_ENS_FREQ_ge5 -FCST_VAR16_LEVELS = P850 -FCST_VAR16_THRESH = ==0.1 -OBS_VAR16_NAME = WIND -OBS_VAR16_LEVELS = P850 -OBS_VAR16_THRESH = ge5 - -FCST_VAR17_NAME = WIND_P850_ENS_FREQ_ge10 -FCST_VAR17_LEVELS = P850 -FCST_VAR17_THRESH = ==0.1 -OBS_VAR17_NAME = WIND -OBS_VAR17_LEVELS = P850 -OBS_VAR17_THRESH = ge10 - -FCST_VAR18_NAME = WIND_P850_ENS_FREQ_ge15 -FCST_VAR18_LEVELS = P850 -FCST_VAR18_THRESH = ==0.1 -OBS_VAR18_NAME = WIND -OBS_VAR18_LEVELS = P850 -OBS_VAR18_THRESH = ge15 - -FCST_VAR19_NAME = WIND_P700_ENS_FREQ_ge10 -FCST_VAR19_LEVELS = P700 -FCST_VAR19_THRESH = ==0.1 
-OBS_VAR19_NAME = WIND -OBS_VAR19_LEVELS = P700 -OBS_VAR19_THRESH = ge10 - -FCST_VAR20_NAME = WIND_P700_ENS_FREQ_ge15 -FCST_VAR20_LEVELS = P700 -FCST_VAR20_THRESH = ==0.1 -OBS_VAR20_NAME = WIND -OBS_VAR20_LEVELS = P700 -OBS_VAR20_THRESH = ge15 - -FCST_VAR21_NAME = WIND_P700_ENS_FREQ_ge20 -FCST_VAR21_LEVELS = P700 -FCST_VAR21_THRESH = ==0.1 -OBS_VAR21_NAME = WIND -OBS_VAR21_LEVELS = P700 -OBS_VAR21_THRESH = ge20 - -FCST_VAR22_NAME = WIND_P500_ENS_FREQ_ge15 -FCST_VAR22_LEVELS = P500 -FCST_VAR22_THRESH = ==0.1 -OBS_VAR22_NAME = WIND -OBS_VAR22_LEVELS = P500 -OBS_VAR22_THRESH = ge15 - -FCST_VAR23_NAME = WIND_P500_ENS_FREQ_ge21 -FCST_VAR23_LEVELS = P500 -FCST_VAR23_THRESH = ==0.1 -OBS_VAR23_NAME = WIND -OBS_VAR23_LEVELS = P500 -OBS_VAR23_THRESH = ge21 - -FCST_VAR24_NAME = WIND_P500_ENS_FREQ_ge26 -FCST_VAR24_LEVELS = P500 -FCST_VAR24_THRESH = ==0.1 -OBS_VAR24_NAME = WIND -OBS_VAR24_LEVELS = P500 -OBS_VAR24_THRESH = ge26 - -FCST_VAR25_NAME = WIND_P250_ENS_FREQ_ge26 -FCST_VAR25_LEVELS = P250 -FCST_VAR25_THRESH = ==0.1 -OBS_VAR25_NAME = WIND -OBS_VAR25_LEVELS = P250 -OBS_VAR25_THRESH = ge26 - -FCST_VAR26_NAME = WIND_P250_ENS_FREQ_ge31 -FCST_VAR26_LEVELS = P250 -FCST_VAR26_THRESH = ==0.1 -OBS_VAR26_NAME = WIND -OBS_VAR26_LEVELS = P250 -OBS_VAR26_THRESH = ge31 - -FCST_VAR27_NAME = WIND_P250_ENS_FREQ_ge36 -FCST_VAR27_LEVELS = P250 -FCST_VAR27_THRESH = ==0.1 -OBS_VAR27_NAME = WIND -OBS_VAR27_LEVELS = P250 -OBS_VAR27_THRESH = ge36 - -FCST_VAR28_NAME = WIND_P250_ENS_FREQ_ge46 -FCST_VAR28_LEVELS = P250 -FCST_VAR28_THRESH = ==0.1 -OBS_VAR28_NAME = WIND -OBS_VAR28_LEVELS = P250 -OBS_VAR28_THRESH = ge46 - -FCST_VAR29_NAME = WIND_P250_ENS_FREQ_ge62 -FCST_VAR29_LEVELS = P250 -FCST_VAR29_THRESH = ==0.1 -OBS_VAR29_NAME = WIND -OBS_VAR29_LEVELS = P250 -OBS_VAR29_THRESH = ge62 - -FCST_VAR30_NAME = HGT_P500_ENS_FREQ_ge5400 -FCST_VAR30_LEVELS = P500 -FCST_VAR30_THRESH = ==0.1 -OBS_VAR30_NAME = HGT -OBS_VAR30_LEVELS = P500 -OBS_VAR30_THRESH = ge5400 - -FCST_VAR31_NAME = HGT_P500_ENS_FREQ_ge5600 -FCST_VAR31_LEVELS = P500 -FCST_VAR31_THRESH = ==0.1 -OBS_VAR31_NAME = HGT -OBS_VAR31_LEVELS = P500 -OBS_VAR31_THRESH = ge5600 - -FCST_VAR32_NAME = HGT_P500_ENS_FREQ_ge5880 -FCST_VAR32_LEVELS = P500 -FCST_VAR32_THRESH = ==0.1 -OBS_VAR32_NAME = HGT -OBS_VAR32_LEVELS = P500 -OBS_VAR32_THRESH = ge5880 - -FCST_VAR33_NAME = CAPE_L0_ENS_FREQ_le1000 -FCST_VAR33_LEVELS = L0 -FCST_VAR33_THRESH = ==0.1 -OBS_VAR33_NAME = CAPE -OBS_VAR33_LEVELS = L0-100000 -OBS_VAR33_THRESH = le1000 -OBS_VAR33_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR34_NAME = CAPE_L0_ENS_FREQ_gt1000.and.lt2500 -FCST_VAR34_LEVELS = L0 -FCST_VAR34_THRESH = ==0.1 -OBS_VAR34_NAME = CAPE -OBS_VAR34_LEVELS = L0-100000 -OBS_VAR34_THRESH = gt1000&<2500 -OBS_VAR34_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR35_NAME = CAPE_L0_ENS_FREQ_gt2500.and.lt4000 -FCST_VAR35_LEVELS = L0 -FCST_VAR35_THRESH = ==0.1 -OBS_VAR35_NAME = CAPE -OBS_VAR35_LEVELS = L0-100000 -OBS_VAR35_THRESH = gt2500&<4000 -OBS_VAR35_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR36_NAME = CAPE_L0_ENS_FREQ_gt2500 -FCST_VAR36_LEVELS = L0 -FCST_VAR36_THRESH = ==0.1 -OBS_VAR36_NAME = CAPE -OBS_VAR36_LEVELS = L0-100000 -OBS_VAR36_THRESH = gt2500 -OBS_VAR36_OPTIONS = cnt_thresh = [ >0 ]; - cnt_logic = UNION; - -FCST_VAR37_NAME = HPBL_Z0_ENS_FREQ_lt500 -FCST_VAR37_LEVELS = Z0 -FCST_VAR37_THRESH = ==0.1 -OBS_VAR37_NAME = PBL -OBS_VAR37_LEVELS = L0 -OBS_VAR37_THRESH = lt500 -OBS_VAR37_OPTIONS = desc = "TKE"; - -FCST_VAR38_NAME = HPBL_Z0_ENS_FREQ_lt1500 
-FCST_VAR38_LEVELS = Z0 -FCST_VAR38_THRESH = ==0.1 -OBS_VAR38_NAME = PBL -OBS_VAR38_LEVELS = L0 -OBS_VAR38_THRESH = lt1500 -OBS_VAR38_OPTIONS = desc = "TKE"; - -FCST_VAR39_NAME = HPBL_Z0_ENS_FREQ_gt1500 -FCST_VAR39_LEVELS = Z0 -FCST_VAR39_THRESH = ==0.1 -OBS_VAR39_NAME = PBL -OBS_VAR39_LEVELS = L0 -OBS_VAR39_THRESH = gt1500 -OBS_VAR39_OPTIONS = desc = "TKE"; - -# -# Forecast data description variables -# -FCST_IS_PROB = True -FCST_PROB_IN_GRIB_PDS = False - -# End of [config] section and start of [dir] section. -[dir] -# -# Directory containing observation input to PointStat. -# -OBS_POINT_STAT_INPUT_DIR = {{obs_input_dir}} -# -# Directory containing forecast input to PointStat. -# -FCST_POINT_STAT_INPUT_DIR = {{fcst_input_dir}} -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_DIR = -# -# Directory containing climatology mean input to PointStat. Not used in -# this example. -# -POINT_STAT_CLIMO_STDEV_INPUT_DIR = -# -# Directory in which to write output from PointStat. -# -# OUTPUT_BASE apparently has to be set to something; it cannot be left -# to its default value. But it is not explicitly used elsewhere in this -# configuration file. -# -OUTPUT_BASE = {{output_base}} -POINT_STAT_OUTPUT_DIR = {{output_dir}} -# -# Directory for staging data. -# -STAGING_DIR = {{staging_dir}} - -# End of [dir] section and start of [filename_templates] section. -[filename_templates] -# -# Template for observation input to PointStat relative to -# OBS_POINT_STAT_INPUT_DIR. -# -OBS_POINT_STAT_INPUT_TEMPLATE = {{obs_input_fn_template}} -# -# Template for forecast input to PointStat relative to -# FCST_POINT_STAT_INPUT_DIR. -# -FCST_POINT_STAT_INPUT_TEMPLATE = {{fcst_input_fn_template}} -# -# Template for output from PointStat relative to POINT_STAT_OUTPUT_DIR. -# -POINT_STAT_OUTPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_MEAN_INPUT_DIR. Not used in this example. -# -POINT_STAT_CLIMO_MEAN_INPUT_TEMPLATE = -# -# Template for climatology input to PointStat relative to -# POINT_STAT_CLIMO_STDEV_INPUT_DIR. Not used in this example. 
-#
-POINT_STAT_CLIMO_STDEV_INPUT_TEMPLATE =
diff --git a/parm/metplus/STATAnalysisConfig_skill_score b/parm/metplus/STATAnalysisConfig_skill_score
index 6fd64a6456..fba1106d6b 100644
--- a/parm/metplus/STATAnalysisConfig_skill_score
+++ b/parm/metplus/STATAnalysisConfig_skill_score
@@ -11,10 +11,19 @@
 //
 model = ["FV3_WoFS_v0_SUBCONUS_3km_test_mem000", "FV3_GFS_v16_SUBCONUS_3km"];

-fcst_lead = [ "6",
- "6",
- "6",
- "6"
+fcst_lead = [ "6", "12",
+ "6", "12",
+ "6", "12",
+ "6", "12",
+ "12",
+ "12",
+ "12",
+ "12",
+ "12",
+ "12",
+ "12",
+ "12",
+ "12"
 ];

 obs_lead = [];
@@ -42,17 +51,35 @@ obs_init_inc = [];
 obs_init_exc = [];
 obs_init_hour = [];

-fcst_var = [ "WIND",
- "DPT",
- "TMP",
- "PRMSL"
+fcst_var = [ "PRMSL", "PRMSL",
+ "WIND", "WIND",
+ "DPT", "DPT",
+ "TMP", "TMP",
+ "WIND",
+ "WIND",
+ "WIND",
+ "TMP",
+ "TMP",
+ "TMP",
+ "SPFH",
+ "SPFH",
+ "SPFH"
 ];

 obs_var = [];

-fcst_lev = [ "Z10",
- "Z2",
- "Z2",
- "Z0"
+fcst_lev = [ "Z0", "Z0",
+ "Z10", "Z10",
+ "Z2", "Z2",
+ "Z2", "Z2",
+ "P250",
+ "P400",
+ "P850",
+ "P250",
+ "P400",
+ "P850",
+ "P300",
+ "P500",
+ "P850"
 ];

 obs_lev = [];
@@ -74,10 +101,19 @@ line_type = [ "SL1L2" ];
 column = [ "RMSE" ];

-weight = [ 10.0,
- 10.0,
- 10.0,
- 10.0
+weight = [ 10.0, 8.0,
+ 10.0, 8.0,
+ 10.0, 8.0,
+ 10.0, 8.0,
+ 4.0,
+ 4.0,
+ 4.0,
+ 4.0,
+ 4.0,
+ 4.0,
+ 4.0,
+ 4.0,
+ 4.0
 ];

////////////////////////////////////////////////////////////////////////////////
@@ -116,6 +152,6 @@ hss_ec_value = NA;
 rank_corr_flag = FALSE;
 vif_flag = FALSE;
 tmp_dir = "/tmp";
-version = "V10.1.1";
+version = "V11.1.0";

////////////////////////////////////////////////////////////////////////////////
diff --git a/parm/metplus/metplus_macros.jinja b/parm/metplus/metplus_macros.jinja
new file mode 100644
index 0000000000..4dc8c599ce
--- /dev/null
+++ b/parm/metplus/metplus_macros.jinja
@@ -0,0 +1,78 @@
+{#-
+Set the string delimiter that separates the forecast value of an item
+(e.g. a field name, level, or threshold) from its observation value in the
+various items in the deterministic and ensemble verification configuration
+files.
+#}
+{%- macro set_delim_str() %}
+ {{-'%%'}}
+{%- endmacro %}
+
+{#-
+This macro prints out an error message and quits the jinja templater.
+#}
+{%- macro print_err_and_quit(error_msg) %}
+ {%- include 'ERROR: ' ~ error_msg %}
+{%- endmacro %}
+{#-
+Given a specified field level that is really an accumulation period, this
+macro prints out an "A" followed by the accumulation period (an integer)
+with any leading zeros removed. For example, if the level is 'A03', it
+prints out 'A3'.
+#}
+{%- macro get_accumulation_no_zero_pad(level) %}
+ {%- set first_char = level[0] %}
+ {%- set the_rest = level[1:] %}
+ {%- if (first_char == 'A') %}
+ {{- first_char ~ '%d'%the_rest|int }}
+ {%- else %}
+ {{- level }}
+ {%- endif %}
+{%- endmacro %}
+
+{#-
+This macro checks whether the specified input field group
+(input_field_group) appears in the given list of valid field groups
+(valid_field_groups). If it does not, the macro prints out an error
+message and quits.
+#}
+{%- macro check_field_group(valid_field_groups, input_field_group) %}
+ {%- if input_field_group not in valid_field_groups %}
+ {%- set error_msg = '\n' ~
+ 'The specified input field group (input_field_group) is not in the list of\n' ~
+ 'valid field groups (valid_field_groups):\n' ~
+ ' input_field_group = \'' ~ input_field_group ~ '\'\n' ~
+ ' valid_field_groups = ' ~ valid_field_groups ~ '\n' ~
+ 'Reset input_field_group to one of the elements in valid_field_groups and\n' ~
+ 'rerun.'
%}
+ {{print_err_and_quit(error_msg)}}
+ {%- endif %}
+{%- endmacro %}
+
+{#-
+This macro checks whether, for the given field, the lists of thresholds
+for all levels are identical. If they are not, it prints out an error
+message and quits.
+#}
+{%- macro check_for_identical_threshes_by_level(field, levels_threshes) %}
+ {%- set avail_levels = levels_threshes.keys()|list %}
+ {%- set num_avail_levels = avail_levels|length %}
+ {%- set threshes_by_avail_level = levels_threshes.values()|list %}
+ {%- for i in range(1,num_avail_levels) %}
+ {%- set level = avail_levels[i-1] %}
+ {%- set threshes = threshes_by_avail_level[i-1] %}
+ {%- set level_next = avail_levels[i] %}
+ {%- set threshes_next = threshes_by_avail_level[i] %}
+ {%- if (threshes_next != threshes) %}
+ {%- set error_msg = '\n\n' ~
+'For the given field (field), the set of thresholds for the next level\n' ~
+'(level_next, threshes_next) is not equal to that of the current level\n' ~
+'(level, threshes) (note that order of thresholds matters here):\n' ~
+' field = \'' ~ field ~ '\'\n' ~
+' num_avail_levels = ' ~ num_avail_levels ~ '\n' ~
+' level = \'' ~ level ~ '\'\n' ~
+' threshes = ' ~ threshes ~ '\n' ~
+' level_next = \'' ~ level_next ~ '\'\n' ~
+' threshes_next = ' ~ threshes_next ~ '\n'
+ %}
+ {{print_err_and_quit(error_msg)}}
+ {%- endif %}
+ {%- endfor %}
+{%- endmacro %}
diff --git a/parm/metplus/vx_config_det.yaml b/parm/metplus/vx_config_det.yaml
new file mode 100644
index 0000000000..8ea3fd5e13
--- /dev/null
+++ b/parm/metplus/vx_config_det.yaml
@@ -0,0 +1,208 @@
+#
+# This configuration file specifies the field groups, fields, levels,
+# and thresholds to use for DETERMINISTIC verification. The format is
+# as follows:
+#
+# FIELD_GROUP1:
+#   FIELD1:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   FIELD2:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   ...
+#
+# FIELD_GROUP2:
+#   FIELD1:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   FIELD2:
+#     LEVEL1: list_of_thresholds
+#     LEVEL2: list_of_thresholds
+#     ...
+#   ...
+#
+# ...
+#
+# If the threshold list for a given combination of field group, field,
+# and level is set to an empty list ([]), then all values of that
+# field will be included in the verification.
+#
+# Both the keys that represent field groups, fields, and levels and the
+# strings in the list of thresholds may contain the separator string "%%"
+# that separates the value of the quantity for the forecast from that for
+# the observations. For example, if a field is set to
+#
+#   RETOP%%EchoTop18
+#
+# it means the name of the field in the forecast data is RETOP while its
+# name in the observations is EchoTop18.
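+#
+# The "%%" separator can likewise appear in level keys and in threshold
+# strings. For example, the ADPSFC entry below,
+#
+#   CRAIN%%PRWE:
+#     L0%%Z0: ['ge1.0%%ge161&&le163']
+#
+# pairs the forecast field CRAIN at level L0 under threshold ge1.0 with
+# the observation field PRWE at level Z0 under threshold ge161&&le163.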
+# +APCP: + APCP: + A1: ['gt0.0', 'ge0.254', 'ge0.508', 'ge1.27', 'ge2.54'] + A3: ['gt0.0', 'ge0.254', 'ge0.508', 'ge1.27', 'ge2.54', 'ge3.810', 'ge6.350'] + A6: ['gt0.0', 'ge0.254', 'ge0.508', 'ge1.27', 'ge2.54', 'ge3.810', 'ge6.350', 'ge8.890', 'ge12.700'] + A24: ['gt0.0', 'ge0.254', 'ge0.508', 'ge1.27', 'ge2.54', 'ge3.810', 'ge6.350', 'ge8.890', 'ge12.700', 'ge25.400'] +ASNOW: + ASNOW: + A6: ['gt0.0', 'ge2.54', 'ge5.08', 'ge10.16', 'ge20.32'] +REFC: + REFC%%MergedReflectivityQCComposite: + L0%%Z500: ['ge20', 'ge30', 'ge40', 'ge50'] +RETOP: + RETOP%%EchoTop18: + L0%%Z500: ['ge20', 'ge30', 'ge40', 'ge50'] +ADPSFC: + TMP: + Z2: [] + DPT: + Z2: [] + RH: + Z2: [] + UGRD: + Z10: ['ge2.572'] + VGRD: + Z10: ['ge2.572'] + WIND: + Z10: ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433'] + PRMSL: + Z0: [] + TCDC: + L0: [] + VIS: + L0: ['lt805', 'lt1609', 'lt4828', 'lt8045', 'ge8045', 'lt16090'] + GUST: + Z0: [] + HGT%%CEILING: + L0: ['lt152', 'lt305', 'lt914', 'lt1520', 'lt3040', 'ge914'] + SPFH: + Z2: [] + CRAIN%%PRWE: + L0%%Z0: ['ge1.0%%ge161&&le163'] + CSNOW%%PRWE: + L0%%Z0: ['ge1.0%%ge171&&le173'] + CFRZR%%PRWE: + L0%%Z0: ['ge1.0%%ge164&&le166'] + CICEP%%PRWE: + L0%%Z0: ['ge1.0%%ge174&&le176'] +ADPUPA: + TMP: + P1000: &adpupa_tmp_threshes + [] + P925: *adpupa_tmp_threshes + P850: *adpupa_tmp_threshes + P700: *adpupa_tmp_threshes + P500: *adpupa_tmp_threshes + P400: *adpupa_tmp_threshes + P300: *adpupa_tmp_threshes + P250: *adpupa_tmp_threshes + P200: *adpupa_tmp_threshes + P150: *adpupa_tmp_threshes + P100: *adpupa_tmp_threshes + P50: *adpupa_tmp_threshes + P20: *adpupa_tmp_threshes + P10: *adpupa_tmp_threshes + RH: + P1000: &adpupa_rh_threshes + [] + P925: *adpupa_rh_threshes + P850: *adpupa_rh_threshes + P700: *adpupa_rh_threshes + P500: *adpupa_rh_threshes + P400: *adpupa_rh_threshes + P300: *adpupa_rh_threshes + P250: *adpupa_rh_threshes + DPT: + P1000: &adpupa_dpt_threshes + [] + P925: *adpupa_dpt_threshes + P850: *adpupa_dpt_threshes + P700: *adpupa_dpt_threshes + P500: *adpupa_dpt_threshes + P400: *adpupa_dpt_threshes + P300: *adpupa_dpt_threshes + UGRD: + P1000: &adpupa_ugrd_threshes + ['ge2.572'] + P925: *adpupa_ugrd_threshes + P850: *adpupa_ugrd_threshes + P700: *adpupa_ugrd_threshes + P500: *adpupa_ugrd_threshes + P400: *adpupa_ugrd_threshes + P300: *adpupa_ugrd_threshes + P250: *adpupa_ugrd_threshes + P200: *adpupa_ugrd_threshes + P150: *adpupa_ugrd_threshes + P100: *adpupa_ugrd_threshes + P50: *adpupa_ugrd_threshes + P20: *adpupa_ugrd_threshes + P10: *adpupa_ugrd_threshes + VGRD: + P1000: &adpupa_vgrd_threshes + ['ge2.572'] + P925: *adpupa_vgrd_threshes + P850: *adpupa_vgrd_threshes + P700: *adpupa_vgrd_threshes + P500: *adpupa_vgrd_threshes + P400: *adpupa_vgrd_threshes + P300: *adpupa_vgrd_threshes + P250: *adpupa_vgrd_threshes + P200: *adpupa_vgrd_threshes + P150: *adpupa_vgrd_threshes + P100: *adpupa_vgrd_threshes + P50: *adpupa_vgrd_threshes + P20: *adpupa_vgrd_threshes + P10: *adpupa_vgrd_threshes + WIND: + P1000: &adpupa_wind_threshes + ['ge2.572', 'ge2.572&<5.144', 'ge5.144', 'ge10.288', 'ge15.433', 'ge20.577', 'ge25.722'] + P925: *adpupa_wind_threshes + P850: *adpupa_wind_threshes + P700: *adpupa_wind_threshes + P500: *adpupa_wind_threshes + P400: *adpupa_wind_threshes + P300: *adpupa_wind_threshes + P250: *adpupa_wind_threshes + P200: *adpupa_wind_threshes + P150: *adpupa_wind_threshes + P100: *adpupa_wind_threshes + P50: *adpupa_wind_threshes + P20: *adpupa_wind_threshes + P10: *adpupa_wind_threshes + HGT: + P1000: &adpupa_hgt_threshes + [] + P950: 
*adpupa_hgt_threshes + P925: *adpupa_hgt_threshes + P850: *adpupa_hgt_threshes + P700: *adpupa_hgt_threshes + P500: *adpupa_hgt_threshes + P400: *adpupa_hgt_threshes + P300: *adpupa_hgt_threshes + P250: *adpupa_hgt_threshes + P200: *adpupa_hgt_threshes + P150: *adpupa_hgt_threshes + P100: *adpupa_hgt_threshes + P50: *adpupa_hgt_threshes + P20: *adpupa_hgt_threshes + P10: *adpupa_hgt_threshes + SPFH: + P1000: &adpupa_spfh_threshes + [] + P850: *adpupa_spfh_threshes + P700: *adpupa_spfh_threshes + P500: *adpupa_spfh_threshes + P400: *adpupa_spfh_threshes + P300: *adpupa_spfh_threshes + CAPE: + L0%%L0-100000: ['gt500', 'gt1000', 'gt1500', 'gt2000', 'gt3000', 'gt4000'] + HPBL%%PBL: + Z0%%L0: [] + HGT%%PBL: + L0: [] + CAPE%%MLCAPE: + L0-90%%L0: ['gt500', 'gt1000', 'gt1500', 'gt2000', 'gt3000', 'gt4000'] diff --git a/parm/metplus/vx_config_ens.yaml b/parm/metplus/vx_config_ens.yaml new file mode 100644 index 0000000000..5f55254a4c --- /dev/null +++ b/parm/metplus/vx_config_ens.yaml @@ -0,0 +1,54 @@ +# +# This configuration file specifies the field groups, fields, levels, +# and thresholds to use for ENSEMBLE verification. The format is the +# same as the one used in the configuration file for deterministic +# verification (vx_config_det.yaml); please see the documentation in +# that file for details. +# +APCP: + APCP: + A1: ['gt0.0', 'ge0.254', 'ge0.508', 'ge2.54'] + A3: ['gt0.0', 'ge0.508', 'ge2.54', 'ge6.350'] + A6: ['gt0.0', 'ge2.54', 'ge6.350', 'ge12.700'] + A24: ['gt0.0', 'ge6.350', 'ge12.700', 'ge25.400'] +ASNOW: + ASNOW: + A6: ['gt0.0', 'ge2.54', 'ge5.08', 'ge10.16', 'ge20.32'] +REFC: + REFC%%MergedReflectivityQCComposite: + L0%%Z500: ['ge20', 'ge30', 'ge40', 'ge50'] +RETOP: + RETOP%%EchoTop18: + L0%%Z500: ['ge20', 'ge30', 'ge40', 'ge50'] +ADPSFC: + TMP: + Z2: ['ge268', 'ge273', 'ge278', 'ge293', 'ge298', 'ge303'] + DPT: + Z2: ['ge263', 'ge268', 'ge273', 'ge288', 'ge293', 'ge298'] + WIND: + Z10: ['ge5', 'ge10', 'ge15'] + TCDC: + L0: ['lt25', 'gt75'] + VIS: + L0: ['lt1609', 'lt8045', 'ge8045'] + HGT%%CEILING: + L0: ['lt152', 'lt305', 'lt914'] +ADPUPA: + TMP: + P850: ['ge288', 'ge293', 'ge298'] + P700: ['ge273', 'ge278', 'ge283'] + P500: ['ge258', 'ge263', 'ge268'] + DPT: + P850: ['ge273', 'ge278', 'ge283'] + P700: ['ge263', 'ge268', 'ge273'] + WIND: + P850: ['ge5', 'ge10', 'ge15'] + P700: ['ge10', 'ge15', 'ge20'] + P500: ['ge15', 'ge21', 'ge26'] + P250: ['ge26', 'ge31', 'ge36', 'ge46', 'ge62'] + HGT: + P500: ['ge5400', 'ge5600', 'ge5880'] + CAPE: + L0%%L0-100000: ['le1000', 'gt1000&<2500', 'ge2500&<4000', 'ge2500'] + HPBL%%PBL: + Z0%%L0: ['lt500', 'lt1500', 'gt1500'] diff --git a/parm/model_configure b/parm/model_configure index d22adf3f3a..aeb45f4719 100644 --- a/parm/model_configure +++ b/parm/model_configure @@ -1,3 +1,5 @@ +total_member: 1 +PE_MEMBER01: {{ PE_MEMBER01 }} start_year: {{ start_year }} start_month: {{ start_month }} start_day: {{ start_day }} @@ -11,6 +13,7 @@ ENS_SPS: .false. dt_atmos: {{ dt_atmos }} calendar: 'julian' memuse_verbose: .false. +atmos_nthreads: {{ atmos_nthreads }} restart_interval: {{ restart_interval }} output_1st_tstep_rst: .false. 
write_dopost: {{ write_dopost }} diff --git a/parm/nems.configure b/parm/ufs.configure similarity index 70% rename from parm/nems.configure rename to parm/ufs.configure index 14d9503c47..d90b7447f4 100644 --- a/parm/nems.configure +++ b/parm/ufs.configure @@ -1,5 +1,5 @@ ############################################# -#### NEMS Run-Time Configuration File ##### +#### UFS Run-Time Configuration File ##### ############################################# # ESMF # @@ -45,21 +45,8 @@ runSeq:: {% else %} # EARTH # EARTH_component_list: ATM -EARTH_attributes:: - Verbosity = 0 -:: - -# ATM # -ATM_model: fv3 -ATM_petlist_bounds: 0 {{ pe_member01_m1 }} -ATM_omp_num_threads: {{ atm_omp_num_threads }} -ATM_attributes:: - Verbosity = 0 - Diagnostic = 0 -:: - -# Run Sequence # -runSeq:: - ATM -:: + ATM_model: fv3 + runSeq:: + ATM + :: {% endif %} diff --git a/parm/wflow/aqm_post.yaml b/parm/wflow/aqm_post.yaml index 31b7b34848..48a0761fef 100644 --- a/parm/wflow/aqm_post.yaml +++ b/parm/wflow/aqm_post.yaml @@ -5,7 +5,7 @@ default_aqm_task: &default_aqm maxtries: '2' envars: &default_vars GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' - USHdir: '&USHdir;' + HOMEdir: '&HOMEdir;' PDY: !cycstr "@Y@m@d" cyc: !cycstr "@H" nprocs: '{{ parent.nnodes * parent.ppn // 1 }}' @@ -22,21 +22,21 @@ default_aqm_task: &default_aqm task_pre_post_stat: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "pre_post_stat" "&JOBSdir;/JREGIONAL_PRE_POST_STAT"' + command: '&LOAD_MODULES_RUN_TASK; "pre_post_stat" "&HOMEdir;/jobs/JSRW_PRE_POST_STAT"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: or: datadep: attrs: age: 00:00:00:05 - text: !cycstr '&COMIN_DIR;/post_@Y@m@d@H_task_complete.txt' + text: !cycstr '&DATAROOT;/DATA_SHARE/@Y@m@d@H/post_@Y@m@d@H_task_complete.txt' metataskdep: attrs: metatask: run_ens_post task_post_stat_o3: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_o3" "&JOBSdir;/JREGIONAL_POST_STAT_O3"' + command: '&LOAD_MODULES_RUN_TASK; "post_stat_o3" "&HOMEdir;/jobs/JSRW_POST_STAT_O3"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: @@ -46,7 +46,7 @@ task_post_stat_o3: task_post_stat_pm25: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "post_stat_pm25" "&JOBSdir;/JREGIONAL_POST_STAT_PM25"' + command: '&LOAD_MODULES_RUN_TASK; "post_stat_pm25" "&HOMEdir;/jobs/JSRW_POST_STAT_PM25"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: @@ -56,7 +56,7 @@ task_post_stat_pm25: task_bias_correction_o3: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_o3" "&JOBSdir;/JREGIONAL_BIAS_CORRECTION_O3"' + command: '&LOAD_MODULES_RUN_TASK; "bias_correction_o3" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_O3"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: @@ -66,7 +66,7 @@ task_bias_correction_o3: task_bias_correction_pm25: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "bias_correction_pm25" "&JOBSdir;/JREGIONAL_BIAS_CORRECTION_PM25"' + command: '&LOAD_MODULES_RUN_TASK; "bias_correction_pm25" "&HOMEdir;/jobs/JSRW_BIAS_CORRECTION_PM25"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 120G dependency: diff --git a/parm/wflow/aqm_prep.yaml b/parm/wflow/aqm_prep.yaml index 6cfab161d7..d90bbde60f 100644 --- a/parm/wflow/aqm_prep.yaml +++ b/parm/wflow/aqm_prep.yaml @@ -5,12 +5,19 @@ default_aqm_task: &default_aqm maxtries: '2' envars: &default_vars GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' - USHdir: '&USHdir;' + HOMEdir: '&HOMEdir;' + envir: 
'&envir;' + model_ver: '&model_ver;' + KEEPDATA: '&KEEPDATA;' + SENDCOM: '&SENDCOM;' + COMROOT: '&COMROOT;' + DATAROOT: '&DATAROOT;' + DCOMROOT: '&DCOMROOT;' + LOGDIR: !cycstr "&LOGDIR;" PDY: !cycstr "@Y@m@d" cyc: !cycstr "@H" nprocs: '{{ parent.nnodes * parent.ppn // 1 }}' subcyc: !cycstr "@M" - LOGDIR: !cycstr "&LOGDIR;" SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' native: '{{ platform.SCHED_NATIVE_CMD }}' nnodes: 1 @@ -22,7 +29,7 @@ default_aqm_task: &default_aqm task_nexus_gfs_sfc: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_gfs_sfc" "&JOBSdir;/JREGIONAL_NEXUS_GFS_SFC"' + command: '&LOAD_MODULES_RUN_TASK; "nexus_gfs_sfc" "&HOMEdir;/jobs/JSRW_NEXUS_GFS_SFC"' native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}' partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' @@ -46,9 +53,9 @@ metatask_nexus_emission: nspt: '{% for h in range(0, cpl_aqm_parm.NUM_SPLIT_NEXUS) %}{{ " %02d" % h }}{% endfor %}' task_nexus_emission_#nspt#: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_emission" "&JOBSdir;/JREGIONAL_NEXUS_EMISSION"' + command: '&LOAD_MODULES_RUN_TASK; "nexus_emission" "&HOMEdir;/jobs/JSRW_NEXUS_EMISSION"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' - nnodes: 4 + nnodes: '{{ task_nexus_emission.NNODES_NEXUS_EMISSION }}' ppn: '{{ task_nexus_emission.PPN_NEXUS_EMISSION // 1 }}' walltime: 01:00:00 envars: @@ -61,7 +68,7 @@ metatask_nexus_emission: task_nexus_post_split: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "nexus_post_split" "&JOBSdir;/JREGIONAL_NEXUS_POST_SPLIT"' + command: '&LOAD_MODULES_RUN_TASK; "nexus_post_split" "&HOMEdir;/jobs/JSRW_NEXUS_POST_SPLIT"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: metataskdep: @@ -70,13 +77,13 @@ task_nexus_post_split: task_fire_emission: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "fire_emission" "&JOBSdir;/JREGIONAL_FIRE_EMISSION"' + command: '&LOAD_MODULES_RUN_TASK; "fire_emission" "&HOMEdir;/jobs/JSRW_FIRE_EMISSION"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' memory: 2G task_point_source: <<: *default_aqm - command: '&LOAD_MODULES_RUN_TASK_FP; "point_source" "&JOBSdir;/JREGIONAL_POINT_SOURCE"' + command: '&LOAD_MODULES_RUN_TASK; "point_source" "&HOMEdir;/jobs/JSRW_POINT_SOURCE"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' walltime: 01:00:00 dependency: @@ -94,11 +101,12 @@ task_aqm_ics_ext: attrs: cycledefs: at_start maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&JOBSdir;/JREGIONAL_AQM_ICS"' + command: '&LOAD_MODULES_RUN_TASK; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"' envars: <<: *default_vars PREV_CYCLE_DIR: '&WARMSTART_CYCLE_DIR;' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + memory: 64G dependency: and: taskdep: @@ -119,11 +127,12 @@ task_aqm_ics: attrs: cycledefs: cycled_from_second maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_ics" "&JOBSdir;/JREGIONAL_AQM_ICS"' + command: '&LOAD_MODULES_RUN_TASK; "aqm_ics" "&HOMEdir;/jobs/JSRW_AQM_ICS"' envars: <<: *default_vars PREV_CYCLE_DIR: '&COMIN_DIR;' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + memory: 64G dependency: and: taskdep: @@ -137,11 +146,11 @@ task_aqm_ics: datadep_tracer: attrs: age: 00:00:00:05 - text: &COMIN_DIR;/RESTART/fv_tracer.res.tile1.nc + text: '&COMIN_DIR;/RESTART/fv_tracer.res.tile1.nc' task_aqm_lbcs: <<: *default_aqm - 
command: '&LOAD_MODULES_RUN_TASK_FP; "aqm_lbcs" "&JOBSdir;/JREGIONAL_AQM_LBCS"' + command: '&LOAD_MODULES_RUN_TASK; "aqm_lbcs" "&HOMEdir;/jobs/JSRW_AQM_LBCS"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' ppn: 24 dependency: diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml index e707b51d92..6fad0b8d83 100644 --- a/parm/wflow/coldstart.yaml +++ b/parm/wflow/coldstart.yaml @@ -20,7 +20,7 @@ default_task: &default_task task_get_extrn_ics: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "get_extrn_ics" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' + command: '&LOAD_MODULES_RUN_TASK; "get_extrn_ics" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' attrs: cycledefs: forecast maxtries: '2' @@ -28,7 +28,7 @@ task_get_extrn_ics: <<: *default_vars ICS_OR_LBCS: ICS join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' - memory: 2G + memory: 4G nnodes: 1 native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}' partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' @@ -51,7 +51,7 @@ task_get_extrn_ics: task_get_extrn_lbcs: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "get_extrn_lbcs" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' + command: '&LOAD_MODULES_RUN_TASK; "get_extrn_lbcs" "&JOBSdir;/JREGIONAL_GET_EXTRN_MDL_FILES"' attrs: cycledefs: forecast maxtries: '2' @@ -59,7 +59,7 @@ task_get_extrn_lbcs: <<: *default_vars ICS_OR_LBCS: LBCS join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' - memory: 2G + memory: 4G nnodes: 1 native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}' partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' @@ -85,7 +85,7 @@ metatask_run_ensemble: mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' task_make_ics_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_ics" "&JOBSdir;/JREGIONAL_MAKE_ICS"' + command: '&LOAD_MODULES_RUN_TASK; "make_ics" "&JOBSdir;/JREGIONAL_MAKE_ICS"' envars: <<: *default_vars SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' @@ -124,7 +124,7 @@ metatask_run_ensemble: task_make_lbcs_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_lbcs" "&JOBSdir;/JREGIONAL_MAKE_LBCS"' + command: '&LOAD_MODULES_RUN_TASK; "make_lbcs" "&JOBSdir;/JREGIONAL_MAKE_LBCS"' envars: <<: *default_vars SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' @@ -142,7 +142,7 @@ metatask_run_ensemble: task_run_fcst_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "run_fcst" "&JOBSdir;/JREGIONAL_RUN_FCST"' + command: '&LOAD_MODULES_RUN_TASK; "run_fcst" "&JOBSdir;/JREGIONAL_RUN_FCST"' envars: <<: *default_vars SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' @@ -186,29 +186,21 @@ metatask_run_ensemble: attrs: task: point_source or_aqm_ics: - and_no_aqm_ics: - not: - taskvalid: - attrs: - task: aqm_ics_ext - not: - taskvalid: - attrs: - task: aqm_ics - and_aqm_atstart: - taskvalid: - attrs: - task: aqm_ics_ext - taskdep: - attrs: - task: aqm_ics_ext - and_aqm_cycled: + not: taskvalid: attrs: task: aqm_ics - taskdep: + taskdep: + attrs: + task: aqm_ics + or_aqm_ics_ext: + not: + taskvalid: attrs: - task: aqm_ics + task: aqm_ics_ext + taskdep: + attrs: + task: aqm_ics_ext or_aqm_lbcs: not: taskvalid: diff --git a/parm/wflow/default_workflow.yaml 
b/parm/wflow/default_workflow.yaml index 2451781e6b..e37fdae1ea 100644 --- a/parm/wflow/default_workflow.yaml +++ b/parm/wflow/default_workflow.yaml @@ -5,18 +5,18 @@ rocoto: entities: ACCOUNT: '{{ user.ACCOUNT }}' CCPA_OBS_DIR: '{{ platform.CCPA_OBS_DIR }}' - COMIN_DIR: '{% if user.RUN_ENVIR == "nco" %}{{"{}/{}.@Y@m@d/@H".format(nco.COMIN_BASEDIR,nco.RUN_default)}}{% else %}{{"{}/@Y@m@d@H".format(workflow.EXPTDIR)}}{% endif %}' + COLDSTART: '{{ workflow.COLDSTART }}' COMINgfs: '{{ platform.get("COMINgfs") }}' - FCST_DIR: '{% if user.RUN_ENVIR == "nco" %}{{"{}/run_fcst_mem#mem#.{}_@Y@m@d@H".format(nco.DATAROOT_default,workflow.WORKFLOW_ID)}}{% else %}{{"{}/@Y@m@d@H".format(workflow.EXPTDIR)}}{% endif %}' GLOBAL_VAR_DEFNS_FP: '{{ workflow.GLOBAL_VAR_DEFNS_FP }}' + HOMEdir: '{{ user.HOMEdir }}' JOBSdir: '{{ user.JOBSdir }}' - LOAD_MODULES_RUN_TASK_FP: '{{ workflow.LOAD_MODULES_RUN_TASK_FP }}' - LOGDIR: '{% if user.RUN_ENVIR == "nco" %}{{"{}/@Y@m@d".format(nco.LOGBASEDIR_default)}}{% else %}{{nco.LOGBASEDIR_default }}{% endif %}' - LOGEXT: '{% if user.RUN_ENVIR == "nco" %}{{".{}.log".format(workflow.WORKFLOW_ID)}}{% else %}{{".log"}}{% endif %}' + KEEPDATA: '{{ nco.KEEPDATA_default }}' + LOAD_MODULES_RUN_TASK: '{{ workflow.LOAD_MODULES_RUN_TASK_FP }} {{ user.MACHINE }}' + LOGEXT: ".log" + NET: '{{ nco.NET_default }}' MRMS_OBS_DIR: '{{ platform.MRMS_OBS_DIR }}' NCORES_PER_NODE: '{{ platform.NCORES_PER_NODE }}' NDAS_OBS_DIR: '{{ platform.NDAS_OBS_DIR }}' - NET: '{{ nco.NET_default }}' NOHRSC_OBS_DIR: '{{ platform.NOHRSC_OBS_DIR }}' PARTITION_DEFAULT: '{{ platform.get("PARTITION_DEFAULT") }}' PARTITION_FCST: '{{ platform.get("PARTITION_FCST") }}' @@ -26,11 +26,20 @@ rocoto: QUEUE_HPSS: '{{ platform.get("QUEUE_HPSS") }}' RUN: '{{ nco.RUN_default }}' SCRIPTSdir: '{{ user.SCRIPTSdir }}' + SENDCOM: '{{ nco.SENDCOM_default }}' SLASH_ENSMEM_SUBDIR: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% else %}{{ "/" }}{% endif %}' USHdir: '{{ user.USHdir }}' - COLDSTART: '{{ workflow.COLDSTART }}' WARMSTART_CYCLE_DIR: '{{ workflow.WARMSTART_CYCLE_DIR }}' WORKFLOW_ID: '{{ workflow.WORKFLOW_ID }}' + + envir: '{{ nco.envir_default }}' + model_ver: '{{ nco.model_ver_default }}' + COMROOT: '{{ nco.PTMP }}/&envir;/com' + DATAROOT: '{{ nco.PTMP }}/&envir;/tmp' + DCOMROOT: '{{ nco.PTMP }}/&envir;/dcom' + COMIN_DIR: '{% if user.RUN_ENVIR == "nco" %}&COMROOT;/&NET;/&model_ver;/&RUN;.@Y@m@d/@H{% else %}{{ workflow.EXPTDIR }}/@Y@m@d@H{% endif %}' + FCST_DIR: '{% if user.RUN_ENVIR == "nco" %}&DATAROOT;/run_fcst_mem#mem#_@Y@m@d@H{% else %}{{ workflow.EXPTDIR }}/@Y@m@d@H{% endif %}' + LOGDIR: '{% if user.RUN_ENVIR == "nco" %}&COMROOT;/output/logs/@Y@m@d{% else %}{{ workflow.EXPTDIR }}/log{% endif %}' attrs: cyclethrottle: "200" realtime: "F" @@ -45,4 +54,4 @@ rocoto: - !startstopfreq ['{%- if workflow.DATE_FIRST_CYCL != workflow.DATE_LAST_CYCL %}{{ [workflow.DATE_FIRST_CYCL[0:8], "{:02d}".format(workflow.INCR_CYCL_FREQ)]|join }}{%- else %}{{workflow.DATE_FIRST_CYCL}}{%- endif %}', '{{workflow.DATE_LAST_CYCL}}', '{{workflow.INCR_CYCL_FREQ}}'] log: !cycstr '&LOGDIR;/FV3LAM_wflow.{% if user.RUN_ENVIR == "nco" %}{{ workflow.WORKFLOW_ID + "." 
}}{% endif %}log' tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}' diff --git a/parm/wflow/plot.yaml b/parm/wflow/plot.yaml index 6dad3e0dfa..8448bc3f9e 100644 --- a/parm/wflow/plot.yaml +++ b/parm/wflow/plot.yaml @@ -12,10 +12,12 @@ default_task_plot: &default_task PDY: !cycstr "@Y@m@d" cyc: !cycstr "@H" subcyc: !cycstr "@M" + fhr: '#fhr#' LOGDIR: !cycstr "&LOGDIR;" SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' ENSMEM_INDX: '#mem#' - nprocs: '{{ nnodes * ppn }}' + nprocs: '{{ parent.nnodes * parent.ppn }}' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' native: '{{ platform.SCHED_NATIVE_CMD }}' nnodes: 1 nodes: '{{ nnodes }}:ppn={{ ppn }}' @@ -24,25 +26,31 @@ default_task_plot: &default_task queue: '&QUEUE_DEFAULT;' walltime: 01:00:00 -task_plot_allvars: - <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "plot_allvars" "&JOBSdir;/JREGIONAL_PLOT_ALLVARS"' - join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' - dependency: - or_do_post: &post_files_exist - and_run_post: # If post was meant to run, wait on the whole post metatask - taskvalid: - attrs: - task: run_post_mem000_f000 - metataskdep: - attrs: - metatask: run_ens_post - and_inline_post: # If inline post ran, wait on the forecast task to complete - not: - taskvalid: - attrs: - task: run_post_mem000_f000 - taskdep: - attrs: - task: run_fcst_mem000 +metatask_plot_allvars: + var: + mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' + metatask_plot_allvars_mem#mem#_all_fhrs: + var: + fhr: '{% for h in range(0, workflow.LONG_FCST_LEN+1) %}{{ " %03d" % h }}{% endfor %}' + cycledef: '{% for h in range(0, workflow.LONG_FCST_LEN+1) %}{% if h <= workflow.FCST_LEN_CYCL|min %}forecast {% else %}long_forecast {% endif %}{% endfor %}' + task_plot_allvars_mem#mem#_f#fhr#: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK; "plot_allvars" "&JOBSdir;/JREGIONAL_PLOT_ALLVARS"' + dependency: + or_do_post: &post_files_exist + and_run_post: # If post was meant to run, wait on the whole post metatask + taskvalid: + attrs: + task: run_post_mem#mem#_f#fhr# + metataskdep: + attrs: + metatask: run_ens_post + and_inline_post: # If inline post ran, wait on the forecast task to complete + not: + taskvalid: + attrs: + task: run_post_mem#mem#_f#fhr# + taskdep: + attrs: + task: run_fcst_mem#mem# diff --git a/parm/wflow/post.yaml b/parm/wflow/post.yaml index 5672e7343f..114e5de377 100644 --- a/parm/wflow/post.yaml +++ b/parm/wflow/post.yaml @@ -3,7 +3,7 @@ default_task_post: &default_task attrs: cycledefs: '#cycledef#' maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_post" "&JOBSdir;/JREGIONAL_RUN_POST"' + command: '&LOAD_MODULES_RUN_TASK; "run_post" "&JOBSdir;/JREGIONAL_RUN_POST"' envars: &default_vars GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' USHdir: '&USHdir;' diff --git a/parm/wflow/prdgen.yaml b/parm/wflow/prdgen.yaml index 6b9f7cd4f6..3f2026a45f 100644 --- a/parm/wflow/prdgen.yaml +++ b/parm/wflow/prdgen.yaml @@ -10,7 +10,7 @@ metatask_run_prdgen: attrs: cycledefs: '#cycledef#' maxtries: 1 - command: '&LOAD_MODULES_RUN_TASK_FP; "run_prdgen" "&JOBSdir;/JREGIONAL_RUN_PRDGEN"' + command: '&LOAD_MODULES_RUN_TASK; "run_prdgen" "&JOBSdir;/JREGIONAL_RUN_PRDGEN"' envars: GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' USHdir:
'&USHdir;' diff --git a/parm/wflow/prep.yaml b/parm/wflow/prep.yaml index c9d5549909..a0c6e3119a 100644 --- a/parm/wflow/prep.yaml +++ b/parm/wflow/prep.yaml @@ -24,12 +24,12 @@ default_task_prep: &default_task task_make_grid: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_grid" "&JOBSdir;/JREGIONAL_MAKE_GRID"' + command: '&LOAD_MODULES_RUN_TASK; "make_grid" "&JOBSdir;/JREGIONAL_MAKE_GRID"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' task_make_orog: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_orog" "&JOBSdir;/JREGIONAL_MAKE_OROG"' + command: '&LOAD_MODULES_RUN_TASK; "make_orog" "&JOBSdir;/JREGIONAL_MAKE_OROG"' join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' dependency: or: &make_grid_satisfied @@ -47,7 +47,7 @@ task_make_orog: task_make_sfc_climo: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK_FP; "make_sfc_climo" "&JOBSdir;/JREGIONAL_MAKE_SFC_CLIMO"' + command: '&LOAD_MODULES_RUN_TASK; "make_sfc_climo" "&JOBSdir;/JREGIONAL_MAKE_SFC_CLIMO"' envars: <<: *default_envars join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' diff --git a/parm/wflow/test.yaml b/parm/wflow/test.yaml new file mode 100644 index 0000000000..9c084d6875 --- /dev/null +++ b/parm/wflow/test.yaml @@ -0,0 +1,42 @@ +# Settings for the integration test tasks, which check that each +# forecast produced the expected number of output files. + +default_task_test: &default_task + account: '&ACCOUNT;' + attrs: + cycledefs: forecast + maxtries: '2' + envars: &default_envars + GLOBAL_VAR_DEFNS_FP: '&GLOBAL_VAR_DEFNS_FP;' + USHdir: '&USHdir;' + FCST_DIR: !cycstr '&FCST_DIR;' + PDY: !cycstr "@Y@m@d" + cyc: !cycstr "@H" + subcyc: !cycstr "@M" + LOGDIR: !cycstr "&LOGDIR;" + SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' + ENSMEM_INDX: '#mem#' + native: '{{ platform.SCHED_NATIVE_CMD }}' + nnodes: 1 + nodes: '{{ nnodes }}:ppn={{ ppn }}' + partition: '{% if platform.get("PARTITION_DEFAULT") %}&PARTITION_DEFAULT;{% else %}None{% endif %}' + ppn: 24 + queue: '&QUEUE_DEFAULT;' + walltime: 00:05:00 + +metatask_integration_test: + var: + mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' + task_integration_test_mem#mem#: + <<: *default_task + command: '&LOAD_MODULES_RUN_TASK; "integration_test" "&JOBSdir;/JREGIONAL_INTEGRATION_TEST"' + join: !cycstr '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' + dependency: + and_run_fcst: + taskvalid: + attrs: + task: run_fcst_mem#mem# + taskdep: + attrs: + task: run_fcst_mem#mem# + diff --git a/parm/wflow/verify_det.yaml b/parm/wflow/verify_det.yaml index 4c6b43ca25..a62adb4481 100644 --- a/parm/wflow/verify_det.yaml +++ b/parm/wflow/verify_det.yaml @@ -31,7 +31,7 @@ metatask_GridStat_CCPA_all_accums_all_mems: <<: *default_task_verify_det attrs: maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' envars: <<: *default_vars OBS_DIR: '&CCPA_OBS_DIR;' @@ -41,6 +41,8 @@ metatask_GridStat_CCPA_all_accums_all_mems: OBTYPE: 'CCPA' ENSMEM_INDX: "#mem#" SLASH_ENSMEM_SUBDIR_OR_NULL: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% endif %}' + FCST_LEVEL: 'A#ACCUM_HH#' + FCST_THRESH: 'all' walltime: 02:00:00 dependency: and: @@ -61,7 +63,7 @@ metatask_GridStat_NOHRSC_all_accums_all_mems: <<: *default_task_verify_det attrs: maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx"
"&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' envars: <<: *default_vars OBS_DIR: '&NOHRSC_OBS_DIR;' @@ -71,12 +73,14 @@ metatask_GridStat_NOHRSC_all_accums_all_mems: OBTYPE: 'NOHRSC' ENSMEM_INDX: "#mem#" SLASH_ENSMEM_SUBDIR_OR_NULL: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% endif %}' + FCST_LEVEL: 'A#ACCUM_HH#' + FCST_THRESH: 'all' walltime: 02:00:00 dependency: and: - taskdep: + taskdep_get_obs_nohrsc: attrs: - task: get_obs_mrms + task: get_obs_nohrsc taskdep_pcpcombine_fcst: attrs: task: run_MET_PcpCombine_fcst_ASNOW#ACCUM_HH#h_mem#mem# @@ -89,7 +93,7 @@ metatask_GridStat_MRMS_all_mems: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["REFC", "RETOP"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_GridStat_vx_#VAR#_mem#mem#: <<: *default_task_verify_det - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' envars: <<: *default_vars OBS_DIR: '&MRMS_OBS_DIR;' @@ -99,10 +103,12 @@ metatask_GridStat_MRMS_all_mems: OBTYPE: 'MRMS' ENSMEM_INDX: "#mem#" SLASH_ENSMEM_SUBDIR_OR_NULL: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% endif %}' + FCST_LEVEL: 'L0' + FCST_THRESH: 'all' walltime: 02:00:00 dependency: and: - taskdep: + taskdep_get_obs_mrms: attrs: task: get_obs_mrms datadep_post_files_exist: @@ -118,7 +124,7 @@ metatask_PointStat_NDAS_all_mems: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["ADPSFC", "ADPUPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_PointStat_vx_#VAR#_mem#mem#: <<: *default_task_verify_det - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX"' envars: <<: *default_vars OBS_DIR: '&NDAS_OBS_DIR;' @@ -128,6 +134,8 @@ metatask_PointStat_NDAS_all_mems: ACCUM_HH: '01' ENSMEM_INDX: "#mem#" SLASH_ENSMEM_SUBDIR_OR_NULL: '{% if global.DO_ENSEMBLE %}{{ "/mem#mem#" }}{% endif %}' + FCST_LEVEL: 'all' + FCST_THRESH: 'all' walltime: 01:00:00 dependency: and: diff --git a/parm/wflow/verify_ens.yaml b/parm/wflow/verify_ens.yaml index cf0a8d1dac..71bc20b3b0 100644 --- a/parm/wflow/verify_ens.yaml +++ b/parm/wflow/verify_ens.yaml @@ -26,7 +26,7 @@ metatask_GenEnsProd_EnsembleStat_CCPA: ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' task_run_MET_GenEnsProd_vx_APCP#ACCUM_HH#h: &task_GenEnsProd_CCPA <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' envars: &envars_GenEnsProd_CCPA <<: *default_vars ACCUM_HH: '#ACCUM_HH#' @@ -34,6 +34,8 @@ metatask_GenEnsProd_EnsembleStat_CCPA: VAR: APCP METPLUSTOOLNAME: 'GENENSPROD' OBTYPE: 'CCPA' + FCST_LEVEL: 'A#ACCUM_HH#' + FCST_THRESH: 'all' dependency: and: # The PcpCombine task for obs must be complete because this GenEnsProd @@ -50,6 +52,7 @@ metatask_GenEnsProd_EnsembleStat_CCPA: envars: <<: *envars_GenEnsProd_CCPA METPLUSTOOLNAME: 'ENSEMBLESTAT' + FCST_THRESH: 'none' dependency: taskdep_genensprod: attrs: @@ -60,7 +63,7 @@ metatask_GenEnsProd_EnsembleStat_NOHRSC: ACCUM_HH: '{% for ah in verification.VX_ASNOW_ACCUMS_HRS 
%}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' task_run_MET_GenEnsProd_vx_ASNOW#ACCUM_HH#h: &task_GenEnsProd_NOHRSC <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' envars: &envars_GenEnsProd_NOHRSC <<: *default_vars ACCUM_HH: '#ACCUM_HH#' @@ -68,6 +71,8 @@ metatask_GenEnsProd_EnsembleStat_NOHRSC: VAR: ASNOW METPLUSTOOLNAME: 'GENENSPROD' OBTYPE: 'NOHRSC' + FCST_LEVEL: 'A#ACCUM_HH#' + FCST_THRESH: 'all' dependency: and: # The PcpCombine task for obs must be complete because this GenEnsProd @@ -81,6 +86,7 @@ metatask_GenEnsProd_EnsembleStat_NOHRSC: envars: <<: *envars_GenEnsProd_NOHRSC METPLUSTOOLNAME: 'ENSEMBLESTAT' + FCST_THRESH: 'none' dependency: and: taskdep: @@ -95,7 +101,7 @@ metatask_GenEnsProd_EnsembleStat_MRMS: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["REFC", "RETOP"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_GenEnsProd_vx_#VAR#: &task_GenEnsProd_MRMS <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' envars: &envars_GenEnsProd_MRMS <<: *default_vars ACCUM_HH: '01' @@ -103,31 +109,35 @@ metatask_GenEnsProd_EnsembleStat_MRMS: VAR: '#VAR#' METPLUSTOOLNAME: 'GENENSPROD' OBTYPE: 'MRMS' + FCST_LEVEL: 'L0' + FCST_THRESH: 'all' dependency: and: - taskdep: - attrs: - task: get_obs_mrms - metataskdep_post_files_exist: &post_files_exist + metataskdep_check_post_output: &check_post_output attrs: metatask: check_post_output_all_mems - task_run_MET_EnsembleStat_vx_#VAR#: <<: *task_GenEnsProd_MRMS envars: <<: *envars_GenEnsProd_MRMS METPLUSTOOLNAME: 'ENSEMBLESTAT' + FCST_LEVEL: 'L0' + FCST_THRESH: 'none' dependency: - taskdep: - attrs: - task: run_MET_GenEnsProd_vx_#VAR# + and: + taskdep_get_obs_mrms: + attrs: + task: get_obs_mrms + taskdep_genensprod: + attrs: + task: run_MET_GenEnsProd_vx_#VAR# metatask_GenEnsProd_EnsembleStat_NDAS: var: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["ADPSFC", "ADPUPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_GenEnsProd_vx_#VAR#: &task_GenEnsProd_NDAS <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GENENSPROD_OR_ENSEMBLESTAT"' envars: &envars_GenEnsProd_NDAS <<: *default_vars OBS_DIR: '&NDAS_OBS_DIR;' @@ -135,6 +145,8 @@ metatask_GenEnsProd_EnsembleStat_NDAS: METPLUSTOOLNAME: 'GENENSPROD' OBTYPE: 'NDAS' ACCUM_HH: '01' + FCST_LEVEL: 'all' + FCST_THRESH: 'all' walltime: 02:30:00 dependency: and: @@ -144,8 +156,8 @@ metatask_GenEnsProd_EnsembleStat_NDAS: taskdep_pb2nc: attrs: task: run_MET_Pb2nc_obs - metataskdep_post_files_exist: - <<: *post_files_exist + metataskdep_check_post_output: + <<: *check_post_output task_run_MET_EnsembleStat_vx_#VAR#: <<: *task_GenEnsProd_NDAS envars: @@ -166,7 +178,7 @@ metatask_GridStat_CCPA_ensmeanprob_all_accums: ACCUM_HH: '{% for ah in verification.VX_APCP_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' task_run_MET_GridStat_vx_ens#statlc#_APCP#ACCUM_HH#h: <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" 
"&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' envars: <<: *default_vars OBS_DIR: '&CCPA_OBS_DIR;' @@ -174,6 +186,8 @@ metatask_GridStat_CCPA_ensmeanprob_all_accums: METPLUSTOOLNAME: 'GRIDSTAT' OBTYPE: 'CCPA' ACCUM_HH: '#ACCUM_HH#' + FCST_LEVEL: 'A#ACCUM_HH#' + FCST_THRESH: 'all' dependency: taskdep: attrs: @@ -188,7 +202,7 @@ metatask_GridStat_NOHRSC_ensmeanprob_all_accums: ACCUM_HH: '{% for ah in verification.VX_ASNOW_ACCUMS_HRS %}{% if workflow.FCST_LEN_HRS >= ah %}{{ "%02d " % ah }}{% endif %}{% endfor %}' task_run_MET_GridStat_vx_ens#statlc#_ASNOW#ACCUM_HH#h: <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' envars: <<: *default_vars OBS_DIR: '&NOHRSC_OBS_DIR;' @@ -196,6 +210,8 @@ metatask_GridStat_NOHRSC_ensmeanprob_all_accums: METPLUSTOOLNAME: 'GRIDSTAT' OBTYPE: 'NOHRSC' ACCUM_HH: '#ACCUM_HH#' + FCST_LEVEL: 'A#ACCUM_HH#' + FCST_THRESH: 'all' dependency: taskdep: attrs: @@ -206,7 +222,7 @@ metatask_GridStat_MRMS_ensprob: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["REFC", "RETOP"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_GridStat_vx_ensprob_#VAR#: <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENSPROB"' envars: <<: *default_vars ACCUM_HH: '01' @@ -214,6 +230,8 @@ metatask_GridStat_MRMS_ensprob: VAR: '#VAR#' METPLUSTOOLNAME: 'GRIDSTAT' OBTYPE: 'MRMS' + FCST_LEVEL: 'L0' + FCST_THRESH: 'all' dependency: taskdep: attrs: @@ -228,7 +246,7 @@ metatask_PointStat_NDAS_ensmeanprob: VAR: '{% for var in verification.VX_FIELDS %}{% if var in ["ADPSFC", "ADPUPA"] %}{{ "%s " % var }}{% endif %}{% endfor %}' task_run_MET_PointStat_vx_ens#statlc#_#VAR#: <<: *default_task_verify_ens - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_GRIDSTAT_OR_POINTSTAT_VX_ENS#stat#"' envars: <<: *default_vars OBS_DIR: '&NDAS_OBS_DIR;' @@ -236,6 +254,8 @@ metatask_PointStat_NDAS_ensmeanprob: METPLUSTOOLNAME: 'POINTSTAT' OBTYPE: 'NDAS' ACCUM_HH: '01' + FCST_LEVEL: 'all' + FCST_THRESH: 'all' dependency: taskdep: attrs: diff --git a/parm/wflow/verify_pre.yaml b/parm/wflow/verify_pre.yaml index eb1a7eb796..0d4e1c2448 100644 --- a/parm/wflow/verify_pre.yaml +++ b/parm/wflow/verify_pre.yaml @@ -23,7 +23,7 @@ default_task_verify_pre: &default_task_verify_pre task_get_obs_ccpa: <<: *default_task_verify_pre - command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' + command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' envars: <<: *default_vars ACCUM_HH: '01' @@ -37,7 +37,7 @@ task_get_obs_ccpa: task_get_obs_nohrsc: <<: *default_task_verify_pre - command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' + command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' envars: <<: *default_vars OBS_DIR: '&NOHRSC_OBS_DIR;' @@ -50,7 +50,7 @@ task_get_obs_nohrsc: task_get_obs_mrms: <<: *default_task_verify_pre - command: '&LOAD_MODULES_RUN_TASK_FP; 
"get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' + command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' envars: <<: *default_vars OBS_DIR: '&MRMS_OBS_DIR;' @@ -69,7 +69,7 @@ task_get_obs_ndas: OBS_DIR: '&NDAS_OBS_DIR;' OBTYPE: 'NDAS' FHR: '{% for h in range(0, workflow.FCST_LEN_HRS+1) %}{{ " %02d" % h }}{% endfor %}' - command: '&LOAD_MODULES_RUN_TASK_FP; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' + command: '&LOAD_MODULES_RUN_TASK; "get_obs" "&JOBSdir;/JREGIONAL_GET_VERIF_OBS"' queue: "&QUEUE_HPSS;" native: '{% if platform.get("SCHED_NATIVE_CMD_HPSS") %}{{ platform.SCHED_NATIVE_CMD_HPSS }}{% else %}{{ platform.SCHED_NATIVE_CMD}}{% endif %}' partition: '{% if platform.get("PARTITION_HPSS") %}&PARTITION_HPSS;{% else %}None{% endif %}' @@ -80,12 +80,12 @@ task_run_MET_Pb2nc_obs: attrs: cycledefs: forecast maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PB2NC_OBS"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PB2NC_OBS"' envars: <<: *default_vars VAR: ADPSFC ACCUM_HH: '01' - obs_or_fcst: obs + FCST_OR_OBS: OBS OBTYPE: NDAS OBS_DIR: '&NDAS_OBS_DIR;' METPLUSTOOLNAME: 'PB2NC' @@ -110,12 +110,12 @@ metatask_PcpCombine_obs: attrs: cycledefs: forecast maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' envars: <<: *default_vars VAR: APCP ACCUM_HH: '#ACCUM_HH#' - obs_or_fcst: obs + FCST_OR_OBS: OBS OBTYPE: CCPA OBS_DIR: '&CCPA_OBS_DIR;' METPLUSTOOLNAME: 'PCPCOMBINE' @@ -140,7 +140,7 @@ metatask_check_post_output_all_mems: attrs: cycledefs: forecast maxtries: '1' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_CHECK_POST_OUTPUT"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_CHECK_POST_OUTPUT"' envars: <<: *default_vars VAR: APCP @@ -221,12 +221,12 @@ metatask_PcpCombine_fcst_APCP_all_accums_all_mems: attrs: cycledefs: forecast maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' envars: <<: *default_vars VAR: APCP ACCUM_HH: '#ACCUM_HH#' - obs_or_fcst: fcst + FCST_OR_OBS: FCST OBTYPE: CCPA OBS_DIR: '&CCPA_OBS_DIR;' METPLUSTOOLNAME: 'PCPCOMBINE' @@ -249,12 +249,12 @@ metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems: attrs: cycledefs: forecast maxtries: '2' - command: '&LOAD_MODULES_RUN_TASK_FP; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' + command: '&LOAD_MODULES_RUN_TASK; "run_vx" "&JOBSdir;/JREGIONAL_RUN_MET_PCPCOMBINE"' envars: <<: *default_vars VAR: ASNOW ACCUM_HH: '#ACCUM_HH#' - obs_or_fcst: fcst + FCST_OR_OBS: FCST OBTYPE: NOHRSC OBS_DIR: '&NOHRSC_OBS_DIR;' METPLUSTOOLNAME: 'PCPCOMBINE' diff --git a/scripts/exregional_check_post_output.sh b/scripts/exregional_check_post_output.sh index 1352d38789..320311cc94 100755 --- a/scripts/exregional_check_post_output.sh +++ b/scripts/exregional_check_post_output.sh @@ -1,5 +1,43 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The ex-script for checking the post output. 
+# +# Run-time environment variables: +# +# ACCUM_HH +# CDATE +# ENSMEM_INDX +# GLOBAL_VAR_DEFNS_FP +# VAR +# +# Experiment variables +# +# user: +# USHdir +# +# workflow: +# FCST_LEN_HRS +# +# global: +# DO_ENSEMBLE +# ENS_TIME_LAG_HRS +# +# verification: +# FCST_FN_TEMPLATE +# FCST_SUBDIR_TEMPLATE +# NUM_MISSING_FCST_FILES_MAX +# VX_FCST_INPUT_BASEDIR +# VX_NDIGITS_ENSMEM_NAMES +# +# constants: +# SECS_PER_HOUR +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +46,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pcpcombine|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco workflow global verification constants task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -50,10 +90,11 @@ print_info_msg " Entering script: \"${scrfunc_fn}\" In directory: \"${scrfunc_dir}\" -This is the ex-script for the task that checks that all the post-processed -output files in fact exist and are at least a certain age. These files -may have been generated by UPP as part of the current SRW App workflow, -or they may be user-staged. +This is the ex-script for the task that checks that no more than +NUM_MISSING_FCST_FILES_MAX of each forecast's (ensemble member's) post- +processed output files are missing. Note that such files may have been +generated by UPP as part of the current SRW App workflow, or they may be +user-staged. ========================================================================" # #----------------------------------------------------------------------- @@ -63,7 +104,7 @@ or they may be user-staged. #----------------------------------------------------------------------- # i="0" -if [ "${DO_ENSEMBLE}" = "TRUE" ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then i=$( bc -l <<< "${ENSMEM_INDX}-1" ) fi time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" ) diff --git a/scripts/exregional_fire_emission.sh b/scripts/exregional_fire_emission.sh deleted file mode 100755 index e1d2101477..0000000000 --- a/scripts/exregional_fire_emission.sh +++ /dev/null @@ -1,198 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_fire_emission" ${GLOBAL_VAR_DEFNS_FP} -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). 
-# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the ex-script for the task that fetches fire emission -data files from disk or generates model-ready RAVE emission file from raw -data files. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set up variables for call to retrieve_data.py -# -#----------------------------------------------------------------------- -# -yyyymmdd=${FIRE_FILE_CDATE:0:8} -hh=${FIRE_FILE_CDATE:8:2} - -CDATE_mh1=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC - 1 hours" "+%Y%m%d%H" ) - -yyyymmdd_mh1=${CDATE_mh1:0:8} -hh_mh1=${CDATE_mh1:8:2} -# -#----------------------------------------------------------------------- -# -# Retrieve fire file to FIRE_EMISSION_STAGING_DIR -# -#----------------------------------------------------------------------- -# -aqm_fire_file_fn="${AQM_FIRE_FILE_PREFIX}_${yyyymmdd}_t${hh}z${AQM_FIRE_FILE_SUFFIX}" - -# Check if the fire file exists in the designated directory -if [ -e "${DCOMINfire}/${yyyymmdd}/${aqm_fire_file_fn}" ]; then - cp_vrfy "${DCOMINfire}/${yyyymmdd}/${aqm_fire_file_fn}" "${FIRE_EMISSION_STAGING_DIR}" -else - # Copy raw data - for ihr in {0..23}; do - download_time=$( $DATE_UTIL --utc --date "${yyyymmdd_mh1} ${hh_mh1} UTC - $ihr hours" "+%Y%m%d%H" ) - FILE_13km="Hourly_Emissions_13km_${download_time}00_${download_time}00.nc" - yyyymmdd_dn=${download_time:0:8} - hh_dn=${download_time:8:2} - missing_download_time=$( $DATE_UTIL --utc --date "${yyyymmdd_dn} ${hh_dn} UTC - 24 hours" "+%Y%m%d%H" ) - yyyymmdd_dn_md1=${missing_download_time:0:8} - FILE_13km_md1=Hourly_Emissions_13km_${missing_download_time}00_${missing_download_time}00.nc - if [ -e "${DCOMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}" ]; then - cp_vrfy "${DCOMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}" . - elif [ -e "${DCOMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1}" ]; then - echo "WARNING: ${FILE_13km} does not exist. Replacing with the file of previous date ..." - cp_vrfy "${DCOMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1}" "${FILE_13km}" - else - message_txt="Fire Emission RAW data does not exist: - FILE_13km_md1 = \"${FILE_13km_md1}\" - DCOMINfire = \"${DCOMINfire}\"" - - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - cp_vrfy "${DCOMINfire}/Hourly_Emissions_13km_dummy.nc" "${FILE_13km}" - message_warning="WARNING: ${message_txt}. Replacing with the dummy file :: AQM RUN SOFT FAILED." - print_info_msg "${message_warning}" - if [ ! -z "${maillist}" ]; then - echo "${message_warning}" | mail.py $maillist - fi - else - print_err_msg_exit "${message_txt}" - fi - fi - done - - ncks -O -h --mk_rec_dmn time Hourly_Emissions_13km_${download_time}00_${download_time}00.nc temp.nc - export err=$? - if [ $err -ne 0 ]; then - message_txt="Call to NCKS returned with nonzero exit code." 
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - fi - - mv_vrfy temp.nc Hourly_Emissions_13km_${download_time}00_${download_time}00.nc - - ncrcat -h Hourly_Emissions_13km_*.nc Hourly_Emissions_13km_${yyyymmdd}0000_${yyyymmdd}2300.t${cyc}z.nc - export err=$? - if [ $err -ne 0 ]; then - message_txt="Call to NCRCAT returned with nonzero exit code." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - fi - - input_fire="${DATA}/Hourly_Emissions_13km_${yyyymmdd}0000_${yyyymmdd}2300.t${cyc}z.nc" - output_fire="${DATA}/Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_new24.t${cyc}z.nc" - - python3 ${HOMEdir}/sorc/AQM-utils/python_utils/RAVE_remake.allspecies.aqmna13km.g793.py --date "${yyyymmdd}" --cyc "${hh}" --input_fire "${input_fire}" --output_fire "${output_fire}" - export err=$? - if [ $err -ne 0 ]; then - message_txt="Call to python script \"RAVE_remake.allspecies.py\" returned with nonzero exit code." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - fi - - ncks --mk_rec_dmn Time Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_new24.t${cyc}z.nc -o Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc - export err=$? - if [ $err -ne 0 ]; then - message_txt="Call to NCKS returned with nonzero exit code." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - fi - - ncrcat Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc Hourly_Emissions_regrid_NA_13km_${yyyymmdd}_t${cyc}z_h24.nc ${aqm_fire_file_fn} - export err=$? - if [ $err -ne 0 ]; then - message_txt="Call to NCRCAT returned with nonzero exit code." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - fi - - # Copy the final fire emission file to STAGING_DIR - cp_vrfy "${DATA}/${aqm_fire_file_fn}" "${FIRE_EMISSION_STAGING_DIR}" - - # Archive the final fire emission file to disk and HPSS - if [ "${DO_AQM_SAVE_FIRE}" = "TRUE" ]; then - cp "${DATA}/${aqm_fire_file_fn}" ${DCOMINfire} - - hsi_log_fn="log.hsi_put.${yyyymmdd}_${hh}" - hsi put ${aqm_fire_file_fn} : ${AQM_FIRE_ARCHV_DIR}/${aqm_fire_file_fn} >& ${hsi_log_fn} - export err=$? - if [ $err -ne 0 ]; then - message_txt="htar file writing operation (\"hsi put ...\") failed. Check the log -file hsi_log_fn in the DATA directory for details: - DATA = \"${DATA}\" - hsi_log_fn = \"${hsi_log_fn}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - fi - fi -fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. 
-# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 diff --git a/scripts/exregional_get_extrn_mdl_files.sh b/scripts/exregional_get_extrn_mdl_files.sh index 018a30c285..96c3136e33 100755 --- a/scripts/exregional_get_extrn_mdl_files.sh +++ b/scripts/exregional_get_extrn_mdl_files.sh @@ -1,5 +1,65 @@ #!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# The ex-script for getting the model files that will be used for either +# initial conditions or lateral boundary conditions for the experiment. +# +# Run-time environment variables: +# +# CDATE +# COMIN +# cyc +# DATA +# EXTRN_MDL_CDATE +# EXTRN_MDL_NAME +# EXTRN_MDL_STAGING_DIR +# GLOBAL_VAR_DEFNS_FP +# ICS_OR_LBCS +# NET +# PDY +# TIME_OFFSET_HRS +# +# Experiment variables +# +# user: +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# EXTRN_MDL_DATA_STORES +# +# workflow: +# DATE_FIRST_CYCL +# EXTRN_MDL_VAR_DEFNS_FN +# FCST_LEN_CYCL +# INCR_CYCL_FREQ +# SYMLINK_FIX_FILES +# +# task_get_extrn_lbcs: +# EXTRN_MDL_FILES_LBCS +# EXTRN_MDL_SOURCE_BASEDIR_LBCS +# EXTRN_MDL_SYSBASEDIR_LBCS +# FV3GFS_FILE_FMT_LBCS +# LBC_SPEC_INTVL_HRS +# +# task_get_extrn_ics: +# EXTRN_MDL_FILES_ICS +# EXTRN_MDL_SOURCE_BASEDIR_ICS +# EXTRN_MDL_SYSBASEDIR_ICS +# FV3GFS_FILE_FMT_ICS +# +# global: +# DO_ENSEMBLE +# NUM_ENS_MEMBERS +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +68,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} + +for sect in user nco platform workflow global task_get_extrn_lbcs \ + task_get_extrn_ics ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -147,12 +211,12 @@ if [ -n "${input_file_path:-}" ] ; then --input_file_path ${input_file_path}" fi -if [ $SYMLINK_FIX_FILES = "TRUE" ]; then +if [ $(boolify $SYMLINK_FIX_FILES) = "TRUE" ]; then additional_flags="$additional_flags \ --symlink" fi -if [ $DO_ENSEMBLE == "TRUE" ] ; then +if [ $(boolify $DO_ENSEMBLE) = "TRUE" ] ; then mem_dir="/mem{mem:03d}" member_list=(1 ${NUM_ENS_MEMBERS}) additional_flags="$additional_flags \ @@ -222,7 +286,7 @@ if [ "${EXTRN_MDL_NAME}" = "GEFS" ]; then for num in $(seq -f "%02g" ${NUM_ENS_MEMBERS}); do sorted_fn=( ) for fcst_hr in "${all_fcst_hrs_array[@]}"; do - # Read in filenames from $EXTRN_MDL_FNS and sort them + # Read in filenames from EXTRN_MDL_FNS and sort them base_path="${EXTRN_MDL_STAGING_DIR}/mem`printf %03d $num`" filenames_array=`awk -F= '/EXTRN_MDL_FNS/{print $2}' $base_path/${EXTRN_DEFNS}` for filename in ${filenames_array[@]}; do diff --git a/scripts/exregional_get_verif_obs.sh b/scripts/exregional_get_verif_obs.sh index a74f11cd3a..6ad6aaed0e 100755 --- a/scripts/exregional_get_verif_obs.sh +++ b/scripts/exregional_get_verif_obs.sh @@ -1,5 +1,28 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The ex-script that checks, pulls, and stages observation data for +# model verification. 
+# +# Run-time environment variables: +# +# FHR +# GLOBAL_VAR_DEFNS_FP +# OBS_DIR +# OBTYPE +# PDY +# VAR +# +# Experiment variables +# +# user: +# USHdir +# PARMdir +# +#----------------------------------------------------------------------- + # #----------------------------------------------------------------------- # @@ -8,7 +31,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task " " ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_integration_test.py b/scripts/exregional_integration_test.py new file mode 100755 index 0000000000..996cf6320e --- /dev/null +++ b/scripts/exregional_integration_test.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python3 + +################################################################################ +#### Python Script Documentation Block +# +# Script name: exregional_integration_test.py +# Script description: Ensures the correct number of netcdf files are generated +# for each experiment +# +# Author: Eddie Snyder Org: NOAA EPIC Date: 2024-02-05 +# +# Instructions: 1. Pass the appropriate info for the required arguments: +# --fcst_dir=/path/to/forecast/files +# --fcst_len= +# 2. Run script with arguments +# +# Notes/future work: - Currently SRW App only accepts netcdf as the UFS WM +# output file format. If that changes, then additional +# logic is needed to address the other file formats. +# - SRW App doesn't have a variable that updates the +# forecast increment. The UFS WM does with the +# output_fh variable, which can be found in the +# model_configure file. If it becomes available with +# the SRW App, then logic is needed to account for the +# forecast increment variable. 
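+#
+#                Illustrative invocation (the directory path and values
+#                below are hypothetical, not defaults of this script):
+#
+#                  python3 exregional_integration_test.py \
+#                      --fcst_dir=/path/to/expt/2019061518 \
+#                      --fcst_len=6 \
+#                      --fcst_inc=1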
+# +################################################################################ + +# -------------Import modules --------------------------# +import os +import sys +import logging +import argparse +import unittest + +# --------------Define some functions ------------------# + + +class TestExptFiles(unittest.TestCase): + fcst_dir = '' + filename_list = '' + + def test_fcst_files(self): + + for filename in self.filename_list: + filename_fp = "{0}/{1}".format(self.fcst_dir, filename) + + logging.info("Checking existence of: {0}".format(filename_fp)) + err_msg = "Missing file: {0}".format(filename_fp) + self.assertTrue(os.path.exists(filename_fp), err_msg) + +def setup_logging(debug=False): + + """Calls initialization functions for logging package, and sets the + user-defined level for logging in the script.""" + + level = logging.INFO + if debug: + level = logging.DEBUG + + logging.basicConfig(format="%(levelname)s: %(message)s ", level=level) + if debug: + logging.info("Logging level set to DEBUG") + + +# -------------Start of script -------------------------# +if __name__ == "__main__": + + parser = argparse.ArgumentParser() + parser.add_argument( + "--fcst_dir", + help="Directory containing forecast files.", + required=True, + ) + parser.add_argument( + "--fcst_len", + help="Forecast length in hours.", + required=True, + ) + parser.add_argument( + "--fcst_inc", + default="1", + help="Increment of forecast in hours.", + required=False, + ) + parser.add_argument( + "--debug", + action="store_true", + help="Print debug messages.", + required=False, + ) + parser.add_argument('unittest_args', nargs='*') + args = parser.parse_args() + sys.argv[1:] = args.unittest_args + + fcst_dir = str(args.fcst_dir) + fcst_len = int(args.fcst_len) + fcst_inc = int(args.fcst_inc) + + # Start logger + setup_logging() + + # Check if model_configure exists + model_configure_fp = "{0}/model_configure".format(fcst_dir) + + if not os.path.isfile(model_configure_fp): + logging.error("Experiment's model_configure file is missing! Exiting!") + sys.exit(1) + + # Loop through model_configure file to find the netcdf base names + f = open(model_configure_fp, 'r') + + for line in f: + if line.startswith("filename_base"): + filename_base_1 = line.split("'")[1] + filename_base_2 = line.split("'")[3] + break + f.close() + + # Create list of expected filenames from the experiment + fcst_len = fcst_len + 1 + filename_list = [] + + for x in range(0, fcst_len, fcst_inc): + fhour = str(x).zfill(3) + filename_1 = "{0}f{1}.nc".format(filename_base_1, fhour) + filename_2 = "{0}f{1}.nc".format(filename_base_2, fhour) + filename_list.append(filename_1) + filename_list.append(filename_2) + + # Call unittest class + TestExptFiles.fcst_dir = fcst_dir + TestExptFiles.filename_list = filename_list + unittest.main() diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 755e1c95c4..104875f8dc 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -1,5 +1,99 @@ #!/usr/bin/env bash + + +# +#----------------------------------------------------------------------- +# +# This script generates NetCDF-formatted grid files required as input +# to the FV3 model configured for the regional domain. +# +# The output of this script is placed in a directory defined by GRID_DIR. +# +# More about the grid for regional configurations of FV3: +# +# a) This script creates grid files for tile 7 (reserved for the +# regional grid, located somewhere within tile 6 of the 6 global +# tiles).
+# +# b) Regional configurations of FV3 need two grid files, one with 3 +# halo cells and one with 4 halo cells. The width of the halo is +# the number of cells in the direction perpendicular to the +# boundary. +# +# c) The tile 7 grid file that this script creates includes a halo, +# with at least 4 cells to accommodate this requirement. The halo +# is made thinner in a subsequent step called "shave". +# +# d) We will let NHW denote the width of the wide halo that is wider +# than the required 3- or 4-cell halos. (NHW; N=number of cells, +# H=halo, W=wide halo) +# +# e) T7 indicates the cell count on tile 7. +# +# +# This script does the following: +# +# - Create the grid, either an ESGgrid with the regional_esg_grid +# executable or a GFDL-type grid with the hgrid executable +# - Calculate the regional grid's global uniform cubed-sphere grid +# equivalent resolution with the global_equiv_resol executable +# - Use the shave executable to reduce the halo to 3 and 4 cells +# - Call an ush script that runs the make_solo_mosaic executable +# +# Run-time environment variables: +# +# DATA +# GLOBAL_VAR_DEFNS_FP +# REDIRECT_OUT_ERR +# +# Experiment variables +# +# user: +# EXECdir +# USHdir +# +# platform: +# PRE_TASK_CMDS +# RUN_CMD_SERIAL + +# workflow: +# DOT_OR_USCORE +# GRID_GEN_METHOD +# RES_IN_FIXLAM_FILENAMES +# RGNL_GRID_NML_FN +# VERBOSE +# +# task_make_grid: +# GFDLgrid_NUM_CELLS +# GFDLgrid_USE_NUM_CELLS_IN_FILENAMES +# GRID_DIR +# +# constants: +# NH3 +# NH4 +# TILE_RGNL +# +# grid_params: +# DEL_ANGLE_X_SG +# DEL_ANGLE_Y_SG +# GFDLgrid_REFINE_RATIO +# IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG +# ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG +# JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG +# JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG +# LAT_CTR +# LON_CTR +# NEG_NX_OF_DOM_WITH_WIDE_HALO +# NEG_NY_OF_DOM_WITH_WIDE_HALO +# NHW +# NX +# NY +# PAZI +# STRETCH_FAC +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +102,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_grid" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow constants grid_params task_make_grid ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -196,7 +292,7 @@ fi # # Change location to the temporary (work) directory. # -cd_vrfy "$DATA" +cd "$DATA" print_info_msg "$VERBOSE" " Starting grid file generation..." @@ -266,29 +362,32 @@ generation executable (exec_fp): # namelist file. # settings=" -'regional_grid_nml': { - 'plon': ${LON_CTR}, - 'plat': ${LAT_CTR}, - 'delx': ${DEL_ANGLE_X_SG}, - 'dely': ${DEL_ANGLE_Y_SG}, - 'lx': ${NEG_NX_OF_DOM_WITH_WIDE_HALO}, - 'ly': ${NEG_NY_OF_DOM_WITH_WIDE_HALO}, - 'pazi': ${PAZI}, - } +'regional_grid_nml': + 'plon': ${LON_CTR} + 'plat': ${LAT_CTR} + 'delx': ${DEL_ANGLE_X_SG} + 'dely': ${DEL_ANGLE_Y_SG} + 'lx': ${NEG_NX_OF_DOM_WITH_WIDE_HALO} + 'ly': ${NEG_NY_OF_DOM_WITH_WIDE_HALO} + 'pazi': ${PAZI} " -# -# Call the python script to create the namelist file. -# - ${USHdir}/set_namelist.py -q -u "$settings" -o ${rgnl_grid_nml_fp} || \ - print_err_msg_exit "\ -Call to python script set_namelist.py to set the variables in the -regional_esg_grid namelist file failed. 
Parameters passed to this script -are: - Full path to output namelist file: - rgnl_grid_nml_fp = \"${rgnl_grid_nml_fp}\" - Namelist settings specified on command line (these have highest precedence): - settings = -$settings" + + # UW takes input from stdin when no -i/--input-config flag is provided + (cat << EOF +$settings +EOF +) | uw config realize \ + --input-format yaml \ + -o ${rgnl_grid_nml_fp} \ + -v \ + + err=$? + if [ $err -ne 0 ]; then + print_err_msg_exit "\ + Error creating regional_esg_grid namelist. + Settings for input are: + $settings" + fi # # Call the executable that generates the grid file. # @@ -311,7 +410,7 @@ fi # to the original directory. # grid_fp="$DATA/${grid_fn}" -cd_vrfy - +cd - print_info_msg "$VERBOSE" " Grid file generation completed successfully." @@ -370,7 +469,7 @@ res_equiv=${res_equiv//$'\n'/} #----------------------------------------------------------------------- # if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - if [ "${GFDLgrid_USE_NUM_CELLS_IN_FILENAMES}" = "TRUE" ]; then + if [ $(boolify "${GFDLgrid_USE_NUM_CELLS_IN_FILENAMES}") = "TRUE" ]; then CRES="C${GFDLgrid_NUM_CELLS}" else CRES="C${res_equiv}" @@ -378,7 +477,15 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then elif [ "${GRID_GEN_METHOD}" = "ESGgrid" ]; then CRES="C${res_equiv}" fi -set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "'$CRES'" + + # UW takes the update values from stdin when no --update-file flag is + # provided. It needs --update-format to do it correctly, though. +echo "workflow: {CRES: ${CRES}}" | uw config realize \ + --input-file $GLOBAL_VAR_DEFNS_FP \ + --update-format yaml \ + --output-file $GLOBAL_VAR_DEFNS_FP \ + --verbose + # #----------------------------------------------------------------------- # @@ -390,7 +497,7 @@ set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "'$CRES'" grid_fp_orig="${grid_fp}" grid_fn="${CRES}${DOT_OR_USCORE}grid.tile${TILE_RGNL}.halo${NHW}.nc" grid_fp="${GRID_DIR}/${grid_fn}" -mv_vrfy "${grid_fp_orig}" "${grid_fp}" +mv "${grid_fp_orig}" "${grid_fp}" # #----------------------------------------------------------------------- # @@ -447,7 +554,7 @@ unshaved_fp="${grid_fp}" # Once it is complete, we will move the resultant file from DATA to # GRID_DIR. # -cd_vrfy "$DATA" +cd "$DATA" # # Create an input namelist file for the shave executable to generate a # grid file with a 3-cell-wide halo from the one with a wide halo. Then @@ -475,7 +582,7 @@ The namelist file (nml_fn) used in this call is in directory DATA: nml_fn = \"${nml_fn}\" DATA = \"${DATA}\"" POST_STEP -mv_vrfy ${shaved_fp} ${GRID_DIR} +mv ${shaved_fp} ${GRID_DIR} # # Create an input namelist file for the shave executable to generate a # grid file with a 4-cell-wide halo from the one with a wide halo. Then @@ -503,7 +610,7 @@ The namelist file (nml_fn) used in this call is in directory DATA: nml_fn = \"${nml_fn}\" DATA = \"${DATA}\"" POST_STEP -mv_vrfy ${shaved_fp} ${GRID_DIR} +mv ${shaved_fp} ${GRID_DIR} # # Create an input namelist file for the shave executable to generate a # grid file without halo from the one with a wide halo. Then @@ -530,11 +637,11 @@ The namelist file (nml_fn) used in this call is in directory DATA: nml_fn = \"${nml_fn}\" DATA = \"${DATA}\"" POST_STEP -mv_vrfy ${shaved_fp} ${GRID_DIR} +mv ${shaved_fp} ${GRID_DIR} # # Change location to the original directory. # -cd_vrfy - +cd - # #----------------------------------------------------------------------- # @@ -611,7 +718,7 @@ failed." 
 #
 #-----------------------------------------------------------------------
 #
-# Call a function (set_FV3nml_sfc_climo_filenames) to set the values of
+# Call a function (set_fv3nml_sfc_climo_filenames) to set the values of
 # those variables in the forecast model's namelist file that specify the
 # paths to the surface climatology files. These files will either already
 # be avaialable in a user-specified directory (SFC_CLIMO_DIR) or will be
@@ -620,7 +727,7 @@ failed."
 #
 #-----------------------------------------------------------------------
 #
-python3 $USHdir/set_FV3nml_sfc_climo_filenames.py \
+python3 $USHdir/set_fv3nml_sfc_climo_filenames.py \
   --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \
   || print_err_msg_exit "\
 Call to function to set surface climatology file names in the FV3 namelist
diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh
index b42c086624..debf526798 100755
--- a/scripts/exregional_make_ics.sh
+++ b/scripts/exregional_make_ics.sh
@@ -1,5 +1,83 @@
 #!/usr/bin/env bash
 
+#
+#-----------------------------------------------------------------------
+#
+# The ex-script that sets up and runs chgres_cube for preparing initial
+# conditions for the FV3 forecast
+#
+# Run-time environment variables:
+#
+#    COMIN
+#    COMOUT
+#    COMROOT
+#    DATA
+#    DATAROOT
+#    DATA_SHARE
+#    EXTRN_MDL_CDATE
+#    GLOBAL_VAR_DEFNS_FP
+#    INPUT_DATA
+#    NET
+#    PDY
+#    REDIRECT_OUT_ERR
+#    SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+#   user:
+#    EXECdir
+#    MACHINE
+#    PARMdir
+#    RUN_ENVIR
+#    USHdir
+#
+#   platform:
+#    FIXgsm
+#    PRE_TASK_CMDS
+#    RUN_CMD_UTILS
+#
+#   workflow:
+#    CCPP_PHYS_SUITE
+#    COLDSTART
+#    CRES
+#    DATE_FIRST_CYCL
+#    DOT_OR_USCORE
+#    EXTRN_MDL_VAR_DEFNS_FN
+#    FIXlam
+#    SDF_USES_RUC_LSM
+#    SDF_USES_THOMPSON_MP
+#    THOMPSON_MP_CLIMO_FP
+#    VERBOSE
+#
+#   task_make_ics:
+#    FVCOM_DIR
+#    FVCOM_FILE
+#    FVCOM_WCSTART
+#    KMP_AFFINITY_MAKE_ICS
+#    OMP_NUM_THREADS_MAKE_ICS
+#    OMP_STACKSIZE_MAKE_ICS
+#    USE_FVCOM
+#    VCOORD_FILE
+#
+#   task_get_extrn_ics:
+#    EXTRN_MDL_NAME_ICS
+#    FV3GFS_FILE_FMT_ICS
+#
+#   global:
+#    HALO_BLEND
+#
+#   cpl_aqm_parm:
+#    CPL_AQM
+#
+#   constants:
+#    NH0
+#    NH4
+#    TILE_RGNL
+#
+#-----------------------------------------------------------------------
+#
+
+
 #
 #-----------------------------------------------------------------------
 #
@@ -8,7 +86,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_ics|task_get_extrn_ics" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm constants task_get_extrn_ics task_make_ics ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 #
 #-----------------------------------------------------------------------
 #
@@ -117,6 +197,7 @@ case "${CCPP_PHYS_SUITE}" in
   "FV3_HRRR" | \
   "FV3_RAP" )
     if [ "${EXTRN_MDL_NAME_ICS}" = "RAP" ] || \
+       [ "${EXTRN_MDL_NAME_ICS}" = "RRFS" ] || \
        [ "${EXTRN_MDL_NAME_ICS}" = "HRRR" ]; then
       varmap_file="GSDphys_var_map.txt"
     elif [ "${EXTRN_MDL_NAME_ICS}" = "NAM" ] || \
@@ -165,7 +246,7 @@ esac
 #
 # fn_grib2:
 # Name (not including path) of the grib2 file generated by the external
-# model. Currently used for NAM, RAP, and HRRR external model data.
+# model. Currently used for NAM, RAP, and HRRR/RRFS external model data.
 #
 # input_type:
 # The "type" of input being provided to chgres_cube. This contains a combi-
@@ -241,7 +322,7 @@ esac
 # tracers_input(:), it must also be 3rd in tracers(:). How can this be checked?
# # NOTE: Really should use a varmap table for GFS, just like we do for -# RAP/HRRR. +# RAP/HRRR/RRFS. # # A non-prognostic variable that appears in the field_table for GSD physics # is cld_amt. Why is that in the field_table at all (since it is a non- @@ -274,7 +355,7 @@ convert_nst="" # # If the external model is not one that uses the RUC land surface model # (LSM) -- which currently includes all valid external models except the -# HRRR and the RAP -- then we set the number of soil levels to include +# HRRR/RRFS and the RAP -- then we set the number of soil levels to include # in the output NetCDF file that chgres_cube generates (nsoill_out; this # is a variable in the namelist that chgres_cube reads in) to 4. This # is because FV3 can handle this regardless of the LSM that it is using @@ -285,7 +366,7 @@ convert_nst="" # 4 soil layers to the 9 layers that it uses. # # On the other hand, if the external model is one that uses the RUC LSM -# (currently meaning that it is either the HRRR or the RAP), then what +# (currently meaning that it is either the HRRR/RRFS or the RAP), then what # we set nsoill_out to depends on whether the RUC or the Noah/Noah MP # LSM is used in the SDF. If the SDF uses RUC, then both the external # model and FV3 use RUC (which expects 9 soil levels), so we simply set @@ -299,21 +380,22 @@ convert_nst="" # 9 to 4 levels. # # In summary, we can set nsoill_out to 4 unless the external model is -# the HRRR or RAP AND the forecast model is using the RUC LSM. +# the HRRR/RRFS or RAP AND the forecast model is using the RUC LSM. # #----------------------------------------------------------------------- # nsoill_out="4" if [ "${EXTRN_MDL_NAME_ICS}" = "HRRR" -o \ + "${EXTRN_MDL_NAME_ICS}" = "RRFS" -o \ "${EXTRN_MDL_NAME_ICS}" = "RAP" ] && \ - [ "${SDF_USES_RUC_LSM}" = "TRUE" ]; then + [ $(boolify "${SDF_USES_RUC_LSM}") = "TRUE" ]; then nsoill_out="9" fi # #----------------------------------------------------------------------- # # If the external model for ICs is one that does not provide the aerosol -# fields needed by Thompson microphysics (currently only the HRRR and +# fields needed by Thompson microphysics (currently only the HRRR/RRFS and # RAP provide aerosol data) and if the physics suite uses Thompson # microphysics, set the variable thomp_mp_climo_file in the chgres_cube # namelist to the full path of the file containing aerosol climatology @@ -325,8 +407,9 @@ fi # thomp_mp_climo_file="" if [ "${EXTRN_MDL_NAME_ICS}" != "HRRR" -a \ + "${EXTRN_MDL_NAME_ICS}" != "RRFS" -a \ "${EXTRN_MDL_NAME_ICS}" != "RAP" ] && \ - [ "${SDF_USES_THOMPSON_MP}" = "TRUE" ]; then + [ $(boolify "${SDF_USES_THOMPSON_MP}") = "TRUE" ]; then thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}" fi # @@ -439,8 +522,9 @@ case "${EXTRN_MDL_NAME_ICS}" in tg3_from_soil=False ;; -"HRRR") +"HRRR"|"RRFS") external_model="HRRR" + fn_grib2="${EXTRN_MDL_FNS[0]}" input_type="grib2" # @@ -546,64 +630,60 @@ fi # IMPORTANT: # If we want a namelist variable to be removed from the namelist file, # in the "settings" variable below, we need to set its value to the -# string "null". This is equivalent to setting its value to -# !!python/none -# in the base namelist file specified by FV3_NML_BASE_SUITE_FP or the -# suite-specific yaml settings file specified by FV3_NML_YAML_CONFIG_FP. -# -# It turns out that setting the variable to an empty string also works -# to remove it from the namelist! Which is better to use?? +# string "null". 
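Note: the reworked `settings` block below is plain YAML that `uw config realize`
renders into the Fortran namelist read by chgres_cube. A minimal standalone
sketch of the two `uw` patterns used in these scripts; the flags come from the
calls in this diff, but the file names and the CRES value here are hypothetical:

    # Render a small YAML config from stdin into a Fortran namelist (fort.41):
    echo "config: {convert_atm: True, regional: 1}" | uw config realize \
      --input-format yaml \
      --output-format nml \
      -o fort.41

    # Update a single key of an existing YAML file in place, as the CRES
    # update in exregional_make_grid.sh above does:
    echo "workflow: {CRES: C403}" | uw config realize \
      --input-file var_defns.yaml \
      --update-format yaml \
      --output-file var_defns.yaml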
 #
 settings="
-'config': {
- 'fix_dir_target_grid': ${FIXlam},
- 'mosaic_file_target_grid': ${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo$((10#${NH4})).nc,
- 'orog_dir_target_grid': ${FIXlam},
- 'orog_files_target_grid': ${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo$((10#${NH4})).nc,
- 'vcoord_file_target_grid': ${VCOORD_FILE},
- 'varmap_file': ${PARMdir}/ufs_utils/varmap_tables/${varmap_file},
- 'data_dir_input_grid': ${extrn_mdl_staging_dir},
- 'atm_files_input_grid': ${fn_atm},
- 'sfc_files_input_grid': ${fn_sfc},
- 'grib2_file_input_grid': \"${fn_grib2}\",
- 'cycle_mon': $((10#${mm})),
- 'cycle_day': $((10#${dd})),
- 'cycle_hour': $((10#${hh})),
- 'convert_atm': True,
- 'convert_sfc': True,
- 'convert_nst': ${convert_nst},
- 'regional': 1,
- 'halo_bndy': $((10#${NH4})),
- 'halo_blend': $((10#${HALO_BLEND})),
- 'input_type': ${input_type},
- 'external_model': ${external_model},
- 'tracers_input': ${tracers_input},
- 'tracers': ${tracers},
- 'nsoill_out': $((10#${nsoill_out})),
- 'geogrid_file_input_grid': ${geogrid_file_input_grid},
- 'vgtyp_from_climo': ${vgtyp_from_climo},
- 'sotyp_from_climo': ${sotyp_from_climo},
- 'vgfrc_from_climo': ${vgfrc_from_climo},
- 'minmax_vgfrc_from_climo': ${minmax_vgfrc_from_climo},
- 'lai_from_climo': ${lai_from_climo},
- 'tg3_from_soil': ${tg3_from_soil},
- 'thomp_mp_climo_file': ${thomp_mp_climo_file},
-}
+'config':
+  'fix_dir_target_grid': ${FIXlam}
+  'mosaic_file_target_grid': ${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo$((10#${NH4})).nc
+  'orog_dir_target_grid': ${FIXlam}
+  'orog_files_target_grid': ${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo$((10#${NH4})).nc
+  'vcoord_file_target_grid': ${VCOORD_FILE}
+  'varmap_file': ${PARMdir}/ufs_utils/varmap_tables/${varmap_file}
+  'data_dir_input_grid': ${extrn_mdl_staging_dir}
+  'atm_files_input_grid': ${fn_atm}
+  'sfc_files_input_grid': ${fn_sfc}
+  'grib2_file_input_grid': \"${fn_grib2}\"
+  'cycle_mon': $((10#${mm}))
+  'cycle_day': $((10#${dd}))
+  'cycle_hour': $((10#${hh}))
+  'convert_atm': True
+  'convert_sfc': True
+  'convert_nst': ${convert_nst}
+  'regional': 1
+  'halo_bndy': $((10#${NH4}))
+  'halo_blend': $((10#${HALO_BLEND}))
+  'input_type': ${input_type}
+  'external_model': ${external_model}
+  'tracers_input': ${tracers_input}
+  'tracers': ${tracers}
+  'nsoill_out': $((10#${nsoill_out}))
+  'geogrid_file_input_grid': ${geogrid_file_input_grid}
+  'vgtyp_from_climo': ${vgtyp_from_climo}
+  'sotyp_from_climo': ${sotyp_from_climo}
+  'vgfrc_from_climo': ${vgfrc_from_climo}
+  'minmax_vgfrc_from_climo': ${minmax_vgfrc_from_climo}
+  'lai_from_climo': ${lai_from_climo}
+  'tg3_from_soil': ${tg3_from_soil}
+  'thomp_mp_climo_file': ${thomp_mp_climo_file}
 "
-#
-# Call the python script to create the namelist file.
-#
+
+
 nml_fn="fort.41"
-${USHdir}/set_namelist.py -q -u "$settings" -o ${nml_fn}
+
+(cat << EOF
+$settings
+EOF
+) | uw config realize \
+  --input-format yaml \
+  -o ${nml_fn} \
+  --output-format nml \
+  -v
+
 err=$?
 if [ $err -ne 0 ]; then
-  message_txt="Call to python script set_namelist.py to set the variables
-in the namelist file read in by the ${exec_fn} executable failed. Parameters
-passed to this script are:
-  Name of output namelist file:
-    nml_fn = \"${nml_fn}\"
-  Namelist settings specified on command line (these have highest precedence):
-    settings =
+  message_txt="Error creating the namelist file read by the ${exec_fn} executable.
+ Settings for input are: $settings" if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_exit "${message_txt}" @@ -611,6 +691,7 @@ $settings" print_err_msg_exit "${message_txt}" fi fi + # #----------------------------------------------------------------------- # @@ -646,15 +727,23 @@ POST_STEP # #----------------------------------------------------------------------- # -mv_vrfy out.atm.tile${TILE_RGNL}.nc \ - ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc - -mv_vrfy out.sfc.tile${TILE_RGNL}.nc \ - ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc - -mv_vrfy gfs_ctrl.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc - -mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then + COMOUT="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later + if [ $(boolify "${COLDSTART}") = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then + data_trans_path="${COMOUT}" + else + data_trans_path="${DATA_SHARE}" + fi + cp -p out.atm.tile${TILE_RGNL}.nc "${data_trans_path}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" + cp -p out.sfc.tile${TILE_RGNL}.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" + cp -p gfs_ctrl.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" + cp -p gfs.bndy.nc "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc" +else + mv out.atm.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc + mv out.sfc.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc + mv gfs_ctrl.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc + mv gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc +fi # #----------------------------------------------------------------------- # @@ -662,7 +751,7 @@ mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TI # #----------------------------------------------------------------------- # -if [ "${USE_FVCOM}" = "TRUE" ]; then +if [ $(boolify "${USE_FVCOM}") = "TRUE" ]; then #Format for fvcom_time: YYYY-MM-DDTHH:00:00.000000 fvcom_exec_fn="fvcom_to_FV3" @@ -679,7 +768,7 @@ Please ensure that you've built this executable." print_err_msg_exit "${message_txt}" fi fi - cp_vrfy ${fvcom_exec_fp} ${INPUT_DATA}/. + cp ${fvcom_exec_fp} ${INPUT_DATA}/. fvcom_data_fp="${FVCOM_DIR}/${FVCOM_FILE}" if [ ! 
-f "${fvcom_data_fp}" ]; then
     message_txt="The file or path (fvcom_data_fp) does not exist:
@@ -694,8 +783,8 @@ Please check the following user defined variables:
     fi
   fi
 
-  cp_vrfy ${fvcom_data_fp} ${INPUT_DATA}/fvcom.nc
-  cd_vrfy ${INPUT_DATA}
+  cp ${fvcom_data_fp} ${INPUT_DATA}/fvcom.nc
+  cd ${INPUT_DATA}
   PREP_STEP
   eval ${RUN_CMD_UTILS} ${fvcom_exec_fn} \
     ${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc fvcom.nc ${FVCOM_WCSTART} ${fvcom_time} \
diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh
index 72f9369ff6..acbe97a56b 100755
--- a/scripts/exregional_make_lbcs.sh
+++ b/scripts/exregional_make_lbcs.sh
@@ -1,5 +1,83 @@
 #!/usr/bin/env bash
 
+#
+#-----------------------------------------------------------------------
+#
+# The ex-script that sets up and runs chgres_cube for preparing lateral
+# boundary conditions for the FV3 forecast
+#
+# Run-time environment variables:
+#
+#    COMIN
+#    COMOUT
+#    COMROOT
+#    DATA
+#    DATAROOT
+#    DATA_SHARE
+#    EXTRN_MDL_CDATE
+#    INPUT_DATA
+#    GLOBAL_VAR_DEFNS_FP
+#    NET
+#    PDY
+#    REDIRECT_OUT_ERR
+#    SLASH_ENSMEM_SUBDIR
+#
+# Experiment variables
+#
+#   user:
+#    EXECdir
+#    MACHINE
+#    PARMdir
+#    RUN_ENVIR
+#    USHdir
+#
+#   platform:
+#    FIXgsm
+#    PRE_TASK_CMDS
+#    RUN_CMD_UTILS
+#
+#   workflow:
+#    CCPP_PHYS_SUITE
+#    COLDSTART
+#    CRES
+#    DATE_FIRST_CYCL
+#    DOT_OR_USCORE
+#    EXTRN_MDL_VAR_DEFNS_FN
+#    FIXlam
+#    SDF_USES_RUC_LSM
+#    SDF_USES_THOMPSON_MP
+#    THOMPSON_MP_CLIMO_FP
+#    VERBOSE
+#
+#   task_get_extrn_lbcs:
+#    EXTRN_MDL_NAME_LBCS
+#    FV3GFS_FILE_FMT_LBCS
+#
+#   task_make_lbcs:
+#    FVCOM_DIR
+#    FVCOM_FILE
+#    FVCOM_WCSTART
+#    KMP_AFFINITY_MAKE_LBCS
+#    OMP_NUM_THREADS_MAKE_LBCS
+#    OMP_STACKSIZE_MAKE_LBCS
+#    USE_FVCOM
+#    VCOORD_FILE
+#
+#   global:
+#    HALO_BLEND
+#
+#   cpl_aqm_parm:
+#    CPL_AQM
+#
+#   constants:
+#    NH0
+#    NH4
+#    TILE_RGNL
+#
+#-----------------------------------------------------------------------
+#
+
+
 #
 #-----------------------------------------------------------------------
 #
@@ -8,7 +86,9 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_lbcs|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow global cpl_aqm_parm constants task_get_extrn_lbcs task_make_lbcs ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 #
 #-----------------------------------------------------------------------
 #
@@ -115,6 +196,7 @@ case "${CCPP_PHYS_SUITE}" in
   "FV3_HRRR" | \
   "FV3_RAP")
     if [ "${EXTRN_MDL_NAME_LBCS}" = "RAP" ] || \
+       [ "${EXTRN_MDL_NAME_LBCS}" = "RRFS" ] || \
        [ "${EXTRN_MDL_NAME_LBCS}" = "HRRR" ]; then
       varmap_file="GSDphys_var_map.txt"
     elif [ "${EXTRN_MDL_NAME_LBCS}" = "NAM" ] || \
@@ -158,7 +240,7 @@ esac
 #
 # fn_grib2:
 # Name (not including path) of the grib2 file generated by the external
-# model. Currently used for NAM, RAP, and HRRR external model data.
+# model. Currently used for NAM, RAP, and HRRR/RRFS external model data.
 #
 # input_type:
 # The "type" of input being provided to chgres_cube. This contains a combi-
@@ -213,7 +295,7 @@ esac
 # tracers_input(:), it must also be 3rd in tracers(:). How can this be checked?
 #
 # NOTE: Really should use a varmap table for GFS, just like we do for
-# RAP/HRRR.
+# RAP/HRRR/RRFS.
# # A non-prognostic variable that appears in the field_table for GSD physics @@ -237,7 +319,7 @@ tracers="\"\"" #----------------------------------------------------------------------- # # If the external model for LBCs is one that does not provide the aerosol -# fields needed by Thompson microphysics (currently only the HRRR and +# fields needed by Thompson microphysics (currently only the HRRR/RRFS and # RAP provide aerosol data) and if the physics suite uses Thompson # microphysics, set the variable thomp_mp_climo_file in the chgres_cube # namelist to the full path of the file containing aerosol climatology @@ -249,8 +331,9 @@ tracers="\"\"" # thomp_mp_climo_file="" if [ "${EXTRN_MDL_NAME_LBCS}" != "HRRR" -a \ + "${EXTRN_MDL_NAME_LBCS}" != "RRFS" -a \ "${EXTRN_MDL_NAME_LBCS}" != "RAP" ] && \ - [ "${SDF_USES_THOMPSON_MP}" = "TRUE" ]; then + [ $(boolify "${SDF_USES_THOMPSON_MP}") = "TRUE" ]; then thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}" fi # @@ -320,7 +403,7 @@ case "${EXTRN_MDL_NAME_LBCS}" in input_type="grib2" ;; -"HRRR") +"HRRR"|"RRFS") external_model="HRRR" input_type="grib2" ;; @@ -410,7 +493,7 @@ for (( ii=0; ii<${num_fhrs}; ii=ii+bcgrpnum10 )); do fi ;; "GDAS") - fn_atm="${EXTRN_MDL_FNS[0][$i]}" + fn_atm="${EXTRN_MDL_FNS[$i]}" ;; "GEFS") fn_grib2="${EXTRN_MDL_FNS[$i]}" @@ -421,6 +504,9 @@ for (( ii=0; ii<${num_fhrs}; ii=ii+bcgrpnum10 )); do "HRRR") fn_grib2="${EXTRN_MDL_FNS[$i]}" ;; + "RRFS") + fn_grib2="${EXTRN_MDL_FNS[$i]}" + ;; "NAM") fn_grib2="${EXTRN_MDL_FNS[$i]}" ;; @@ -467,53 +553,48 @@ FORTRAN namelist file has not specified for this external LBC model (EXTRN_MDL_N # IMPORTANT: # If we want a namelist variable to be removed from the namelist file, # in the "settings" variable below, we need to set its value to the -# string "null". This is equivalent to setting its value to -# !!python/none -# in the base namelist file specified by FV3_NML_BASE_SUITE_FP or the -# suite-specific yaml settings file specified by FV3_NML_YAML_CONFIG_FP. -# -# It turns out that setting the variable to an empty string also works -# to remove it from the namelist! Which is better to use?? -# -settings=" -'config': { - 'fix_dir_target_grid': ${FIXlam}, - 'mosaic_file_target_grid': ${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo$((10#${NH4})).nc, - 'orog_dir_target_grid': ${FIXlam}, - 'orog_files_target_grid': ${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo$((10#${NH4})).nc, - 'vcoord_file_target_grid': ${VCOORD_FILE}, - 'varmap_file': ${PARMdir}/ufs_utils/varmap_tables/${varmap_file}, - 'data_dir_input_grid': ${extrn_mdl_staging_dir}, - 'atm_files_input_grid': ${fn_atm}, - 'grib2_file_input_grid': \"${fn_grib2}\", - 'cycle_mon': $((10#${mm})), - 'cycle_day': $((10#${dd})), - 'cycle_hour': $((10#${hh})), - 'convert_atm': True, - 'regional': 2, - 'halo_bndy': $((10#${NH4})), - 'halo_blend': $((10#${HALO_BLEND})), - 'input_type': ${input_type}, - 'external_model': ${external_model}, - 'tracers_input': ${tracers_input}, - 'tracers': ${tracers}, - 'thomp_mp_climo_file': ${thomp_mp_climo_file}, -} +# string "null". 
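Note on the GDAS change above: bash arrays are strictly one-dimensional, so
the old two-subscript form `${EXTRN_MDL_FNS[0][$i]}` never selected a "row" of
a second dimension; `${EXTRN_MDL_FNS[$i]}` is the correct per-boundary-hour
lookup. A minimal sketch of the fixed pattern (file names hypothetical):

    # One boundary file name per LBC update hour:
    EXTRN_MDL_FNS=( "gdas.t00z.atmf003.nc" "gdas.t00z.atmf006.nc" )
    i=1
    fn_atm="${EXTRN_MDL_FNS[$i]}"
    echo "${fn_atm}"   # prints gdas.t00z.atmf006.nc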
+#
+ settings="
+'config':
+  'fix_dir_target_grid': ${FIXlam}
+  'mosaic_file_target_grid': ${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo$((10#${NH4})).nc
+  'orog_dir_target_grid': ${FIXlam}
+  'orog_files_target_grid': ${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo$((10#${NH4})).nc
+  'vcoord_file_target_grid': ${VCOORD_FILE}
+  'varmap_file': ${PARMdir}/ufs_utils/varmap_tables/${varmap_file}
+  'data_dir_input_grid': ${extrn_mdl_staging_dir}
+  'atm_files_input_grid': ${fn_atm}
+  'grib2_file_input_grid': \"${fn_grib2}\"
+  'cycle_mon': $((10#${mm}))
+  'cycle_day': $((10#${dd}))
+  'cycle_hour': $((10#${hh}))
+  'convert_atm': True
+  'regional': 2
+  'halo_bndy': $((10#${NH4}))
+  'halo_blend': $((10#${HALO_BLEND}))
+  'input_type': ${input_type}
+  'external_model': ${external_model}
+  'tracers_input': ${tracers_input}
+  'tracers': ${tracers}
+  'thomp_mp_climo_file': ${thomp_mp_climo_file}
 "
-#
-# Call the python script to create the namelist file.
-#
+
 nml_fn="fort.41"
-  ${USHdir}/set_namelist.py -q -u "$settings" -o ${nml_fn}
+  # UW takes input from stdin when no -i/--input-config flag is provided
+  (cat << EOF
+$settings
+EOF
+) | uw config realize \
+  --input-format yaml \
+  -o ${nml_fn} \
+  --output-format nml \
+  -v
+
 export err=$?
 if [ $err -ne 0 ]; then
-  message_txt="Call to python script set_namelist.py to set the variables
-in the namelist file read in by the ${exec_fn} executable failed. Parameters
-passed to this script are:
-  Name of output namelist file:
-    nml_fn = \"${nml_fn}\"
-  Namelist settings specified on command line (these have highest precedence):
-    settings =
+  message_txt="Error creating the namelist file read by the ${exec_fn} executable.
+  Settings for input are:
 $settings"
   if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
     err_exit "${message_txt}"
@@ -565,7 +646,11 @@ located in the following directory:
     lbc_spec_fhrs=( "${EXTRN_MDL_FHRS[$i]}" )
     fcst_hhh=$(( ${lbc_spec_fhrs} - ${EXTRN_MDL_LBCS_OFFSET_HRS} ))
     fcst_hhh_FV3LAM=$( printf "%03d" "$fcst_hhh" )
-    mv_vrfy gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc
+    if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then
+      cp -p gfs.bndy.nc ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc
+    else
+      mv gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc
+    fi
   fi
 done
diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh
index 4fcf34a4ad..34b1675d8c 100755
--- a/scripts/exregional_make_orog.sh
+++ b/scripts/exregional_make_orog.sh
@@ -1,5 +1,86 @@
 #!/usr/bin/env bash
 
+#
+#-----------------------------------------------------------------------
+#
+# This ex-script is responsible for creating orography files for the FV3
+# forecast.
+#
+# The output of this script is placed in a directory defined by OROG_DIR.
+#
+# More about the orography files for the regional configuration of FV3:
+#
+#   a) Only the tile 7 orography file is created.
+#
+#   b) This orography file contains a halo of the same width (NHW)
+#      as the grid file for tile 7 generated by the make_grid script.
+#
+#   c) Filtered versions of the orography files are created with the
+#      same halo width (NHW) as the unfiltered orography file and the
+#      grid file. FV3 requires two filtered orography files, one with
+#      no halo cells and one with 4 halo cells.
+#
+# This script does the following:
+#
+#   - Create the raw orography files by running the orog executable.
+#   - Run the orog_gsl executable if any of several GSL-developed
+#     physics suites is chosen by the user.
+#   - Run the filter_topo executable on the raw orography files
+#   - Run the shave executable for the 0- and 4-cell halo orography
+#     files
+#
+# Run-time environment variables:
+#
+#    DATA
+#    GLOBAL_VAR_DEFNS_FP
+#    REDIRECT_OUT_ERR
+#
+# Experiment variables
+#
+#   user:
+#    EXECdir
+#    USHdir
+#
+#   platform:
+#    FIXorg
+#    PRE_TASK_CMDS
+#    RUN_CMD_SERIAL
+#
+#   workflow:
+#    CCPP_PHYS_SUITE
+#    CRES
+#    DOT_OR_USCORE
+#    FIXam
+#    FIXlam
+#    GRID_GEN_METHOD
+#    PREEXISTING_DIR_METHOD
+#    VERBOSE
+#
+#   task_make_orog:
+#    KMP_AFFINITY_MAKE_OROG
+#    OMP_NUM_THREADS_MAKE_OROG
+#    OMP_STACKSIZE_MAKE_OROG
+#    OROG_DIR
+#
+#   task_make_grid:
+#    GFDLgrid_NUM_CELLS
+#    GFDLgrid_STRETCH_FAC
+#    GFDLgrid_REFINE_RATIO
+#
+#   constants:
+#    NH0
+#    NH4
+#    TILE_RGNL
+#
+#   grid_params:
+#    NHW
+#    NX
+#    NY
+#    STRETCH_FAC
+#
+#-----------------------------------------------------------------------
+#
+
 #
 #-----------------------------------------------------------------------
 #
@@ -8,7 +89,10 @@
 #-----------------------------------------------------------------------
 #
 . $USHdir/source_util_funcs.sh
-source_config_for_task "task_make_orog|task_make_grid" ${GLOBAL_VAR_DEFNS_FP}
+for sect in user nco platform workflow constants grid_params task_make_grid task_make_orog ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
+
 #
 #-----------------------------------------------------------------------
 #
@@ -30,13 +114,7 @@ source_config_for_task "task_make_orog|task_make_grid" ${GLOBAL_VAR_DEFNS_FP}
 scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
 scrfunc_fn=$( basename "${scrfunc_fp}" )
 scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Print message indicating entry into script.
-#
-#-----------------------------------------------------------------------
-#
+
 print_info_msg "
 ========================================================================
 Entering script:  \"${scrfunc_fn}\"
@@ -54,17 +132,7 @@ This is the ex-script for the task that generates orography files.
 export KMP_AFFINITY=${KMP_AFFINITY_MAKE_OROG}
 export OMP_NUM_THREADS=${OMP_NUM_THREADS_MAKE_OROG}
 export OMP_STACKSIZE=${OMP_STACKSIZE_MAKE_OROG}
-#
-#-----------------------------------------------------------------------
-#
-# Load modules and set various computational parameters and directories.
-#
-# Note:
-# These module loads should all be moved to modulefiles. This has been
-# done for Hera but must still be done for other machines.
-#
-#-----------------------------------------------------------------------
-#
+
 eval ${PRE_TASK_CMDS}
 
 if [ -z "${RUN_CMD_SERIAL:-}" ] ; then
@@ -85,16 +153,16 @@ fi
 #-----------------------------------------------------------------------
 #
 check_for_preexist_dir_file "${OROG_DIR}" "${PREEXISTING_DIR_METHOD}"
-mkdir_vrfy -p "${OROG_DIR}"
+mkdir -p "${OROG_DIR}"
 
 raw_dir="${OROG_DIR}/raw_topo"
-mkdir_vrfy -p "${raw_dir}"
+mkdir -p "${raw_dir}"
 
 filter_dir="${OROG_DIR}/filtered_topo"
-mkdir_vrfy -p "${filter_dir}"
+mkdir -p "${filter_dir}"
 
 shave_dir="${OROG_DIR}/shave_tmp"
-mkdir_vrfy -p "${shave_dir}"
+mkdir -p "${shave_dir}"
 #
 #
 #-----------------------------------------------------------------------
@@ -103,9 +171,6 @@ mkdir_vrfy -p "${shave_dir}"
 #
 #-----------------------------------------------------------------------
 #
-# Set the name and path to the executable that generates the raw orography
-# file and make sure that it exists.
-# exec_fn="orog" exec_fp="$EXECdir/${exec_fn}" if [ ! -f "${exec_fp}" ]; then @@ -114,32 +179,21 @@ The executable (exec_fp) for generating the orography file does not exist: exec_fp = \"${exec_fp}\" Please ensure that you've built this executable." fi -# -# Create a temporary (work) directory in which to generate the raw orography -# file and change location to it. -# + DATA="${DATA:-${raw_dir}/tmp}" -mkdir_vrfy -p "${DATA}" -cd_vrfy "${DATA}" +mkdir -p "${DATA}" +cd "${DATA}" # # Copy topography and related data files from the system directory (FIXorg) # to the temporary directory. # -cp_vrfy ${FIXorg}/thirty.second.antarctic.new.bin fort.15 -cp_vrfy ${FIXorg}/landcover30.fixed . -cp_vrfy ${FIXorg}/gmted2010.30sec.int fort.235 +cp ${FIXorg}/thirty.second.antarctic.new.bin fort.15 +cp ${FIXorg}/landcover30.fixed . +cp ${FIXorg}/gmted2010.30sec.int fort.235 # #----------------------------------------------------------------------- # -# The orography filtering code reads in from the grid mosaic file the -# the number of tiles, the name of the grid file for each tile, and the -# dimensions (nx and ny) of each tile. Next, set the name of the grid -# mosaic file and create a symlink to it in filter_dir. -# -# Note that in the namelist file for the orography filtering code (created -# later below), the mosaic file name is saved in a variable called -# "grid_file". It would have been better to call this "mosaic_file" -# instead so it doesn't get confused with the grid file for a given tile... +# Get the grid file info from the mosaic file # #----------------------------------------------------------------------- # @@ -152,21 +206,15 @@ grid_fp="${FIXlam}/${grid_fn}" # #----------------------------------------------------------------------- # -# Set input parameters for the orography generation executable and write -# them to a text file. +# Set input parameters for the orog executable in a formatted text file. +# The executable takes its parameters via the command line. # -# Note that it doesn't matter what lonb and latb are set to below because -# if we specify an input grid file to the executable read in (which is -# what we do below), then if lonb and latb are not set to the dimensions -# of the grid specified in that file (divided by 2 since the grid file -# specifies a "supergrid"), then lonb and latb effectively get reset to -# the dimensions specified in the grid file. +# Note: lonb and latb are placeholders in this case since the program +# uses the ones obtained from the grid file. # #----------------------------------------------------------------------- # mtnres=1 -#lonb=$res -#latb=$res lonb=0 latb=0 jcap=0 @@ -186,6 +234,8 @@ echo $mtnres $lonb $latb $jcap $NR $NF1 $NF2 $efac $blat > "${input_redirect_fn} # echo "\"${grid_fp}\"" >> "${input_redirect_fn}" echo "\"$orogfile\"" >> "${input_redirect_fn}" +echo ".false." >> "${input_redirect_fn}" #MASK_ONLY +echo "none" >> "${input_redirect_fn}" #MERGE_FILE cat "${input_redirect_fn}" # #----------------------------------------------------------------------- @@ -193,15 +243,13 @@ cat "${input_redirect_fn}" # Call the executable to generate the raw orography file corresponding # to tile 7 (the regional domain) only. # -# The following will create an orography file named +# The script moves the output file from its temporary directory to the +# OROG_DIR and names it: # -# oro.${CRES}.tile7.nc +# ${CRES}_raw_orog.tile7.halo${NHW}.nc # -# and will place it in OROG_DIR. 
Note that this file will include -# orography for a halo of width NHW cells around tile 7. The follow- -# ing will also create a work directory called tile7 under OROG_DIR. -# This work directory can be removed after the orography file has been -# created (it is currently not deleted). +# Note that this file will include orography for a halo of width NHW +# cells around tile 7. # #----------------------------------------------------------------------- # @@ -219,13 +267,11 @@ POST_STEP # # Change location to the original directory. # -cd_vrfy - +cd - # #----------------------------------------------------------------------- # -# Move the raw orography file from the temporary directory to raw_dir. -# In the process, rename it such that its name includes CRES and the halo -# width. +# Move the raw orography file and rename it. # #----------------------------------------------------------------------- # @@ -234,21 +280,21 @@ raw_orog_fn_prefix="${CRES}${DOT_OR_USCORE}raw_orog" fn_suffix_with_halo="tile${TILE_RGNL}.halo${NHW}.nc" raw_orog_fn="${raw_orog_fn_prefix}.${fn_suffix_with_halo}" raw_orog_fp="${raw_dir}/${raw_orog_fn}" -mv_vrfy "${raw_orog_fp_orig}" "${raw_orog_fp}" +mv "${raw_orog_fp_orig}" "${raw_orog_fp}" # #----------------------------------------------------------------------- # -# Call the code to generate the two orography statistics files (large- -# and small-scale) needed for the drag suite in the FV3_HRRR physics -# suite. +# Call the orog_gsl executable to generate the two orography statistics +# files (large- and small-scale) needed for the drag suite in certain +# GSL physics suites. # #----------------------------------------------------------------------- # suites=( "FV3_RAP" "FV3_HRRR" "FV3_GFS_v15_thompson_mynn_lam3km" "FV3_GFS_v17_p8" ) if [[ ${suites[@]} =~ "${CCPP_PHYS_SUITE}" ]] ; then DATA="${DATA:-${OROG_DIR}/temp_orog_data}" - mkdir_vrfy -p ${DATA} - cd_vrfy ${DATA} + mkdir -p ${DATA} + cd ${DATA} mosaic_fn_gwd="${CRES}${DOT_OR_USCORE}mosaic.halo${NH4}.nc" mosaic_fp_gwd="${FIXlam}/${mosaic_fn_gwd}" grid_fn_gwd=$( get_charvar_from_netcdf "${mosaic_fp_gwd}" "gridfiles" ) || \ @@ -256,12 +302,9 @@ if [[ ${suites[@]} =~ "${CCPP_PHYS_SUITE}" ]] ; then grid_fp_gwd="${FIXlam}/${grid_fn_gwd}" ls_fn="geo_em.d01.lat-lon.2.5m.HGT_M.nc" ss_fn="HGT.Beljaars_filtered.lat-lon.30s_res.nc" - create_symlink_to_file target="${grid_fp_gwd}" symlink="${DATA}/${grid_fn_gwd}" \ - relative="TRUE" - create_symlink_to_file target="${FIXam}/${ls_fn}" symlink="${DATA}/${ls_fn}" \ - relative="TRUE" - create_symlink_to_file target="${FIXam}/${ss_fn}" symlink="${DATA}/${ss_fn}" \ - relative="TRUE" + create_symlink_to_file ${grid_fp_gwd} ${DATA}/${grid_fn_gwd} TRUE + create_symlink_to_file ${FIXam}/${ls_fn} ${DATA}/${ls_fn} TRUE + create_symlink_to_file ${FIXam}/${ss_fn} ${DATA}/${ss_fn} TRUE input_redirect_fn="grid_info.dat" cat > "${input_redirect_fn}" < "${filter_dir}/input.nml" < "${filter_dir}/input.nml" < ${nml_fn} + > ${ascii_fn} PREP_STEP -eval ${RUN_CMD_SERIAL} ${exec_fp} < ${nml_fn} ${REDIRECT_OUT_ERR} || \ +eval ${RUN_CMD_SERIAL} ${exec_fp} < ${ascii_fn} ${REDIRECT_OUT_ERR} || \ print_err_msg_exit "\ Call to executable (exec_fp) to generate a (filtered) orography file with a ${NH0}-cell-wide halo from the orography file with a {NHW}-cell-wide halo returned with nonzero exit code: exec_fp = \"${exec_fp}\" -The namelist file (nml_fn) used in this call is in directory shave_dir: - nml_fn = \"${nml_fn}\" +The config file (ascii_fn) used in this call is in directory shave_dir: + ascii_fn 
= \"${ascii_fn}\" shave_dir = \"${shave_dir}\"" POST_STEP -mv_vrfy ${shaved_fp} ${OROG_DIR} +mv ${shaved_fp} ${OROG_DIR} # -# Create an input namelist file for the shave executable to generate an +# Create an input config file for the shave executable to generate an # orography file with a 4-cell-wide halo from the one with a wide halo. # Then call the shave executable. Finally, move the resultant file to # the OROG_DIR directory. @@ -526,33 +554,33 @@ print_info_msg "$VERBOSE" " \"Shaving\" filtered orography file with a ${NHW}-cell-wide halo to obtain a filtered orography file with a ${NH4}-cell-wide halo..." -nml_fn="input.shave.orog.halo${NH4}" +ascii_fn="input.shave.orog.halo${NH4}" shaved_fp="${shave_dir}/${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo${NH4}.nc" printf "%s %s %s %s %s\n" \ $NX $NY ${NH4} \"${unshaved_fp}\" \"${shaved_fp}\" \ - > ${nml_fn} + > ${ascii_fn} PREP_STEP -eval ${RUN_CMD_SERIAL} ${exec_fp} < ${nml_fn} ${REDIRECT_OUT_ERR} || \ +eval ${RUN_CMD_SERIAL} ${exec_fp} < ${ascii_fn} ${REDIRECT_OUT_ERR} || \ print_err_msg_exit "\ Call to executable (exec_fp) to generate a (filtered) orography file with a ${NH4}-cell-wide halo from the orography file with a {NHW}-cell-wide halo returned with nonzero exit code: exec_fp = \"${exec_fp}\" -The namelist file (nml_fn) used in this call is in directory shave_dir: - nml_fn = \"${nml_fn}\" +The namelist file (ascii_fn) used in this call is in directory shave_dir: + ascii_fn = \"${ascii_fn}\" shave_dir = \"${shave_dir}\"" POST_STEP -mv_vrfy "${shaved_fp}" "${OROG_DIR}" +mv "${shaved_fp}" "${OROG_DIR}" # # Change location to the original directory. # -cd_vrfy - +cd - # #----------------------------------------------------------------------- # -# Add link in ORIG_DIR directory to the orography file with a 4-cell-wide -# halo such that the link name do not contain the halo width. These links +# Add link in OROG_DIR directory to the orography file with a 4-cell-wide +# halo such that the link name does not contain the halo width. These links # are needed by the make_sfc_climo task. # # NOTE: It would be nice to modify the sfc_climo_gen_code to read in @@ -565,13 +593,7 @@ python3 $USHdir/link_fix.py \ --file-group "orog" || \ print_err_msg_exit "\ Call to function to create links to orography files failed." -# -#----------------------------------------------------------------------- -# -# Print message indicating successful completion of script. -# -#----------------------------------------------------------------------- -# + print_info_msg " ======================================================================== Orography files with various halo widths generated successfully!!! diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index 868029a488..a916228b1f 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -1,5 +1,52 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# This ex-script generates surface climatology files needed to run FV3 +# forecasts. 
+# +# The script runs the sfc_climo_gen UFS Utils program, and links the +# output to the SFC_CLIMO_GEN directory +# +# Run-time environment variables: +# +# DATA +# GLOBAL_VAR_DEFNS_FP +# REDIRECT_OUT_ERR +# +# Experiment variables +# +# user: +# EXECdir +# USHdir +# +# platform: +# FIXsfc +# PRE_TASK_CMDS +# RUN_CMD_UTILS +# +# workflow: +# CRES +# DOT_OR_USCORE +# FIXlam +# VERBOSE +# +# task_make_sfc_climo: +# KMP_AFFINITY_MAKE_SFC_CLIMO +# OMP_NUM_THREADS_MAKE_SFC_CLIMO +# OMP_STACKSIZE_MAKE_SFC_CLIMO +# SFC_CLIMO_DIR +# +# constants: +# GTYPE +# NH0 +# NH4 +# TILE_RGNL +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +55,9 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_make_sfc_climo" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow constants task_make_sfc_climo ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -70,7 +119,7 @@ ulimit -s unlimited # #----------------------------------------------------------------------- # -cd_vrfy $DATA +cd $DATA # #----------------------------------------------------------------------- # @@ -162,7 +211,7 @@ case "$GTYPE" in # for fn in *.nc; do if [[ -f $fn ]]; then - mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}_${fn} + mv $fn ${SFC_CLIMO_DIR}/${CRES}_${fn} fi done ;; @@ -181,7 +230,7 @@ case "$GTYPE" in for fn in *.halo.nc; do if [ -f $fn ]; then bn="${fn%.halo.nc}" - mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH4}.nc + mv $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH4}.nc fi done # @@ -194,7 +243,7 @@ case "$GTYPE" in for fn in *.nc; do if [ -f $fn ]; then bn="${fn%.nc}" - mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH0}.nc + mv $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH0}.nc fi done ;; diff --git a/scripts/exregional_plot_allvars.py b/scripts/exregional_plot_allvars.py index 27eff0f4b0..040e17b012 100755 --- a/scripts/exregional_plot_allvars.py +++ b/scripts/exregional_plot_allvars.py @@ -577,6 +577,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) coastline = cfeature.NaturalEarthFeature( "physical", @@ -586,6 +587,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) states = cfeature.NaturalEarthFeature( "cultural", @@ -596,6 +598,7 @@ def plot_all(dom): linewidth=fline_wd, linestyle=":", alpha=falpha, + zorder=4, ) borders = cfeature.NaturalEarthFeature( "cultural", @@ -605,6 +608,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) # All lat lons are earth relative, so setup the associated projection correct for that data diff --git a/scripts/exregional_plot_allvars_diff.py b/scripts/exregional_plot_allvars_diff.py index e51a3a6b57..61efcdb82b 100755 --- a/scripts/exregional_plot_allvars_diff.py +++ b/scripts/exregional_plot_allvars_diff.py @@ -652,6 +652,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) coastline = cfeature.NaturalEarthFeature( "physical", @@ -661,6 +662,7 @@ def plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) states = cfeature.NaturalEarthFeature( "cultural", @@ -671,6 +673,7 @@ def plot_all(dom): linewidth=fline_wd, linestyle=":", alpha=falpha, + zorder=4, ) borders = cfeature.NaturalEarthFeature( "cultural", @@ -680,6 +683,7 @@ def 
plot_all(dom): facecolor="none", linewidth=fline_wd, alpha=falpha, + zorder=4, ) # All lat lons are earth relative, so setup the associated projection correct for that data diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index c5519d923c..0241dbd728 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -1,5 +1,113 @@ #!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# This ex-script is responsible for running the FV3 regional forecast. +# +# Run-time environment variables: +# +# CDATE +# COMIN +# COMOUT +# COMROOT +# DATA +# DBNROOT +# GLOBAL_VAR_DEFNS_FP +# INPUT_DATA +# NET +# PDY +# REDIRECT_OUT_ERR +# RUN +# SENDDBN +# SLASH_ENSMEM_SUBDIR +# +# Experiment variables +# +# user: +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# PRE_TASK_CMDS +# RUN_CMD_FCST +# +# workflow: +# CCPP_PHYS_DIR +# CCPP_PHYS_SUITE +# COLDSTART +# CRES +# DATA_TABLE_FN +# DATA_TABLE_FP +# DATE_FIRST_CYCL +# DOT_OR_USCORE +# EXPTDIR +# FCST_LEN_CYCL +# FCST_LEN_HRS +# FIELD_DICT_FP +# FIELD_DICT_FN +# FIELD_TABLE_FN +# FIELD_TABLE_FP +# FIXam +# FIXclim +# FIXlam +# FV3_NML_FN +# FV3_NML_FP +# FV3_NML_STOCH_FP +# INCR_CYCL_FREQ +# PREDEF_GRID_NAME +# SYMLINK_FIX_FILES +# VERBOSE +# +# task_get_extrn_lbcs: +# LBC_SPEC_INTVL_HRS +# +# task_run_fcst: +# DO_FCST_RESTART +# DT_ATMOS +# FV3_EXEC_FP +# KMP_AFFINITY_RUN_FCST +# OMP_NUM_THREADS_RUN_FCST +# OMP_STACKSIZE_RUN_FCST +# PRINT_ESMF +# RESTART_INTERVAL +# USE_MERRA_CLIMO +# WRITE_DOPOST +# +# task_run_post: +# CUSTOM_POST_CONFIG_FP +# DT_SUBHOURLY_POST_MNTS +# POST_OUTPUT_DOMAIN_NAME +# SUB_HOURLY_POST +# USE_CUSTOM_POST_CONFIG_FILE +# +# global: +# DO_ENSEMBLE +# DO_LSM_SPP +# DO_SHUM +# DO_SKEB +# DO_SPP +# DO_SPPT +# +# cpl_aqm_parm: +# AQM_RC_PRODUCT_FN +# CPL_AQM +# +# constants: +# NH0 +# NH3 +# NH4 +# TILE_RGNL +# +# fixed_files: +# CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +116,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_fcst|task_run_post|task_get_extrn_ics|task_get_extrn_lbcs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm constants fixed_files \ + task_get_extrn_lbcs task_run_fcst task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done + # #----------------------------------------------------------------------- # @@ -57,7 +169,7 @@ export OMP_NUM_THREADS=${OMP_NUM_THREADS_RUN_FCST} export OMP_STACKSIZE=${OMP_STACKSIZE_RUN_FCST} export MPI_TYPE_DEPTH=20 export ESMF_RUNTIME_COMPLIANCECHECK=OFF:depth=4 -if [ "${PRINT_ESMF}" = "TRUE" ]; then +if [ $(boolify "${PRINT_ESMF}") = "TRUE" ]; then export ESMF_RUNTIME_PROFILE=ON export ESMF_RUNTIME_PROFILE_OUTPUT="SUMMARY" fi @@ -98,7 +210,7 @@ Creating links in the INPUT subdirectory of the current run directory to the grid and (filtered) orography files ..." # Create links to fix files in the FIXlam directory. -cd_vrfy ${DATA}/INPUT +cd ${DATA}/INPUT # # For experiments in which the TN_MAKE_GRID task is run, we make the @@ -121,8 +233,7 @@ fi #target="${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo${NH4}.nc" # Should this point to this halo4 file or a halo3 file??? 
target="${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo${NH3}.nc" # Should this point to this halo4 file or a halo3 file??? symlink="grid_spec.nc" -create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" +create_symlink_to_file $target $symlink ${relative_link_flag} # Symlink to halo-3 grid file with "halo3" stripped from name. mosaic_fn="grid_spec.nc" @@ -130,8 +241,7 @@ grid_fn=$( get_charvar_from_netcdf "${mosaic_fn}" "gridfiles" ) target="${FIXlam}/${grid_fn}" symlink="${grid_fn}" -create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" +create_symlink_to_file $target $symlink ${relative_link_flag} # Symlink to halo-4 grid file with "${CRES}_" stripped from name. # @@ -147,8 +257,7 @@ create_symlink_to_file target="$target" symlink="$symlink" \ # target="${FIXlam}/${CRES}${DOT_OR_USCORE}grid.tile${TILE_RGNL}.halo${NH4}.nc" symlink="grid.tile${TILE_RGNL}.halo${NH4}.nc" -create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" +create_symlink_to_file $target $symlink ${relative_link_flag} # @@ -165,8 +274,7 @@ fi # Symlink to halo-0 orography file with "${CRES}_" and "halo0" stripped from name. target="${FIXlam}/${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo${NH0}.nc" symlink="oro_data.nc" -create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" +create_symlink_to_file $target $symlink ${relative_link_flag} # # Symlink to halo-4 orography file with "${CRES}_" stripped from name. # @@ -182,8 +290,7 @@ create_symlink_to_file target="$target" symlink="$symlink" \ # target="${FIXlam}/${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo${NH4}.nc" symlink="oro_data.tile${TILE_RGNL}.halo${NH4}.nc" -create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" +create_symlink_to_file $target $symlink ${relative_link_flag} # # If using the FV3_HRRR physics suite, there are two files (that contain # statistics of the orography) that are needed by the gravity wave drag @@ -198,8 +305,7 @@ if [[ ${suites[@]} =~ "${CCPP_PHYS_SUITE}" ]] ; then for file_id in "${file_ids[@]}"; do target="${FIXlam}/${CRES}${DOT_OR_USCORE}oro_data_${file_id}.tile${TILE_RGNL}.halo${NH0}.nc" symlink="oro_data_${file_id}.nc" - create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} done fi # @@ -225,7 +331,7 @@ of the current run directory (DATA), where DATA = \"${DATA}\" ..." 
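Note: `create_symlink_to_file` is now called with positional arguments, in the
order target, symlink, relative-flag, replacing the earlier
`target=... symlink=... relative=...` keyword style. A minimal usage sketch
with hypothetical paths:

    target="${FIXlam}/C403_grid.tile7.halo4.nc"
    symlink="grid.tile7.halo4.nc"
    relative_link_flag="TRUE"
    create_symlink_to_file $target $symlink ${relative_link_flag}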
-cd_vrfy ${DATA}/INPUT +cd ${DATA}/INPUT # # The symlinks to be created point to files in the same directory (INPUT), @@ -233,42 +339,55 @@ cd_vrfy ${DATA}/INPUT # relative_link_flag="FALSE" -target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" -symlink="gfs_data.nc" -create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then + COMIN="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later -target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" -symlink="sfc_data.nc" -create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" + target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" + symlink="gfs_data.nc" + create_symlink_to_file $target $symlink ${relative_link_flag} -target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" -symlink="gfs_ctrl.nc" -create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" + target="${COMIN}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" + symlink="sfc_data.nc" + create_symlink_to_file $target $symlink ${relative_link_flag} + target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" + symlink="gfs_ctrl.nc" + create_symlink_to_file $target $symlink ${relative_link_flag} -for fhr in $(seq -f "%03g" 0 ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS}); do - target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f${fhr}.nc" - symlink="gfs_bndy.tile${TILE_RGNL}.${fhr}.nc" - create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" -done - -if [ "${CPL_AQM}" = "TRUE" ]; then - target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc" + for fhr in $(seq -f "%03g" 0 ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS}); do + target="${COMIN}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f${fhr}.nc" + symlink="gfs_bndy.tile${TILE_RGNL}.${fhr}.nc" + create_symlink_to_file $target $symlink ${relative_link_flag} + done + target="${COMIN}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc" symlink="NEXUS_Expt.nc" - create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} - # create symlink to PT for point source in Online-CMAQ - target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.PT.nc" + # create symlink to PT for point source in SRW-AQM + target="${COMIN}/${NET}.${cycle}${dot_ensmem}.PT.nc" if [ -f ${target} ]; then symlink="PT.nc" - create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} fi + +else + target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" + symlink="gfs_data.nc" + create_symlink_to_file $target $symlink ${relative_link_flag} + + target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" + symlink="sfc_data.nc" + create_symlink_to_file $target $symlink ${relative_link_flag} + + target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" + symlink="gfs_ctrl.nc" + create_symlink_to_file $target $symlink ${relative_link_flag} + + for fhr in $(seq -f "%03g" 0 ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS}); do + 
target="${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f${fhr}.nc" + symlink="gfs_bndy.tile${TILE_RGNL}.${fhr}.nc" + create_symlink_to_file $target $symlink ${relative_link_flag} + done fi # #----------------------------------------------------------------------- @@ -281,7 +400,7 @@ fi # #----------------------------------------------------------------------- # -cd_vrfy ${DATA} +cd ${DATA} print_info_msg "$VERBOSE" " Creating links in the current run directory (DATA) to fixed (i.e. @@ -300,7 +419,7 @@ static) files in the FIXam directory: # isn't really an advantage to using relative symlinks, so we use symlinks # with absolute paths. # -if [ "${SYMLINK_FIX_FILES}" == "FALSE" ]; then +if [ $(boolify "${SYMLINK_FIX_FILES}") = "FALSE" ]; then relative_link_flag="TRUE" else relative_link_flag="FALSE" @@ -318,8 +437,7 @@ for (( i=0; i<${num_symlinks}; i++ )); do symlink="${DATA}/$symlink" target="$FIXam/$target" - create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} done # @@ -330,7 +448,7 @@ done # #----------------------------------------------------------------------- # -if [ "${USE_MERRA_CLIMO}" = "TRUE" ]; then +if [ $(boolify "${USE_MERRA_CLIMO}") = "TRUE" ]; then for f_nm_path in ${FIXclim}/*; do f_nm=$( basename "${f_nm_path}" ) pre_f="${f_nm%%.*}" @@ -342,8 +460,7 @@ if [ "${USE_MERRA_CLIMO}" = "TRUE" ]; then symlink="${DATA}/${pre_f}.dat" fi target="${f_nm_path}" - create_symlink_to_file target="$target" symlink="$symlink" \ - relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} done fi # @@ -355,8 +472,8 @@ fi # #----------------------------------------------------------------------- # -cd_vrfy ${DATA} -rm_vrfy -f time_stamp.out +cd ${DATA} +rm -f time_stamp.out # #----------------------------------------------------------------------- # @@ -386,28 +503,22 @@ else relative_link_flag="FALSE" fi -create_symlink_to_file target="${DATA_TABLE_FP}" \ - symlink="${DATA}/${DATA_TABLE_FN}" \ - relative="${relative_link_flag}" +create_symlink_to_file ${DATA_TABLE_FP} ${DATA}/${DATA_TABLE_FN} ${relative_link_flag} -create_symlink_to_file target="${FIELD_TABLE_FP}" \ - symlink="${DATA}/${FIELD_TABLE_FN}" \ - relative="${relative_link_flag}" +create_symlink_to_file ${FIELD_TABLE_FP} ${DATA}/${FIELD_TABLE_FN} ${relative_link_flag} -create_symlink_to_file target="${FIELD_DICT_FP}" \ - symlink="${DATA}/${FIELD_DICT_FN}" \ - relative="${relative_link_flag}" +create_symlink_to_file ${FIELD_DICT_FP} ${DATA}/${FIELD_DICT_FN} ${relative_link_flag} -if [ ${WRITE_DOPOST} = "TRUE" ]; then - cp_vrfy ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat - if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then +if [ $(boolify ${WRITE_DOPOST}) = "TRUE" ]; then + cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat + if [ $(boolify ${USE_CUSTOM_POST_CONFIG_FILE}) = "TRUE" ]; then post_config_fp="${CUSTOM_POST_CONFIG_FP}" print_info_msg " ==================================================================== CUSTOM_POST_CONFIG_FP = \"${CUSTOM_POST_CONFIG_FP}\" ====================================================================" else - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then post_config_fp="${PARMdir}/upp/postxconfig-NT-AQM.txt" else post_config_fp="${PARMdir}/upp/postxconfig-NT-fv3lam.txt" @@ -417,11 +528,11 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then post_config_fp = \"${post_config_fp}\" 
====================================================================" fi - cp_vrfy ${post_config_fp} ./postxconfig-NT_FH00.txt - cp_vrfy ${post_config_fp} ./postxconfig-NT.txt - cp_vrfy ${PARMdir}/upp/params_grib2_tbl_new . + cp ${post_config_fp} ./postxconfig-NT_FH00.txt + cp ${post_config_fp} ./postxconfig-NT.txt + cp ${PARMdir}/upp/params_grib2_tbl_new . # Set itag for inline-post: - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then post_itag_add="aqf_on=.true.," else post_itag_add="" @@ -446,7 +557,7 @@ fi #---------------------------------------------------------------------- # -cp_vrfy ${CCPP_PHYS_DIR}/noahmptable.tbl . +cp ${CCPP_PHYS_DIR}/noahmptable.tbl . # #----------------------------------------------------------------------- @@ -456,14 +567,17 @@ cp_vrfy ${CCPP_PHYS_DIR}/noahmptable.tbl . #----------------------------------------------------------------------- # STOCH="FALSE" -if ([ "${DO_SPP}" = "TRUE" ] || [ "${DO_SPPT}" = "TRUE" ] || [ "${DO_SHUM}" = "TRUE" ] || \ - [ "${DO_SKEB}" = "TRUE" ] || [ "${DO_LSM_SPP}" = "TRUE" ]); then +if ([ $(boolify "${DO_SPP}") = "TRUE" ] || \ + [ $(boolify "${DO_SPPT}") = "TRUE" ] || \ + [ $(boolify "${DO_SHUM}") = "TRUE" ] || \ + [ $(boolify "${DO_SKEB}") = "TRUE" ] || \ + [ $(boolify "${DO_LSM_SPP}") = "TRUE" ]); then STOCH="TRUE" fi -if [ "${STOCH}" == "TRUE" ]; then - cp_vrfy ${FV3_NML_STOCH_FP} ${DATA}/${FV3_NML_FN} +if [ "${STOCH}" = "TRUE" ]; then + cp ${FV3_NML_STOCH_FP} ${DATA}/${FV3_NML_FN} else - ln_vrfy -sf ${FV3_NML_FP} ${DATA}/${FV3_NML_FN} + ln -sf ${FV3_NML_FP} ${DATA}/${FV3_NML_FN} fi # @@ -473,8 +587,8 @@ fi # #----------------------------------------------------------------------- # -if ([ "$STOCH" == "TRUE" ] && [ "${DO_ENSEMBLE}" = "TRUE" ]); then - python3 $USHdir/set_FV3nml_ens_stoch_seeds.py \ +if ([ "$STOCH" == "TRUE" ] && [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]); then + python3 $USHdir/set_fv3nml_ens_stoch_seeds.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ --cdate "$CDATE" || print_err_msg_exit "\ Call to function to create the ensemble-based namelist for the current @@ -490,10 +604,9 @@ fi # #----------------------------------------------------------------------- # -if [ "${CPL_AQM}" = "TRUE" ] && [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ] && [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then python3 $USHdir/update_input_nml.py \ - --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ - --run_dir "${DATA}" \ + --namelist "${DATA}/${FV3_NML_FN}" \ --aqm_na_13km || print_err_msg_exit "\ Call to function to update the FV3 input.nml file for air quality modeling using AQM_NA_13km for the current cycle's (cdate) run directory (DATA) failed: @@ -509,19 +622,18 @@ fi #----------------------------------------------------------------------- # flag_fcst_restart="FALSE" -if [ "${DO_FCST_RESTART}" = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then - cp_vrfy input.nml input.nml_orig - cp_vrfy model_configure model_configure_orig - if [ "${CPL_AQM}" = "TRUE" ]; then - cp_vrfy aqm.rc aqm.rc_orig +if [ $(boolify "${DO_FCST_RESTART}") = "TRUE" ] && [ "$(ls -A ${DATA}/RESTART )" ]; then + cp input.nml input.nml_orig + cp model_configure model_configure_orig + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then + cp aqm.rc aqm.rc_orig fi relative_link_flag="FALSE" flag_fcst_restart="TRUE" # Update FV3 input.nml for restart python3 $USHdir/update_input_nml.py \ - --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ - --run_dir "${DATA}" \ + --namelist "${DATA}/${FV3_NML_FN}" \ --restart 
export err=$? if [ $err -ne 0 ]; then @@ -561,14 +673,14 @@ for the current cycle's (cdate) run directory (DATA) failed: done # Create soft-link of restart files in INPUT directory - cd_vrfy ${DATA}/INPUT + cd ${DATA}/INPUT for file_id in "${file_ids[@]}"; do - rm_vrfy "${file_id}" + rm "${file_id}" target="${DATA}/RESTART/${rst_yyyymmdd}.${rst_hh}0000.${file_id}" symlink="${file_id}" - create_symlink_to_file target="$target" symlink="$symlink" relative="${relative_link_flag}" + create_symlink_to_file $target $symlink ${relative_link_flag} done - cd_vrfy ${DATA} + cd ${DATA} fi # #----------------------------------------------------------------------- @@ -577,8 +689,10 @@ fi # #----------------------------------------------------------------------- # -if [ "${CPL_AQM}" = "TRUE" ]; then - if [ "${COLDSTART}" = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ] && [ "${flag_fcst_restart}" = "FALSE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then + if [ $(boolify "${COLDSTART}") = "TRUE" ] && \ + [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ] && \ + [ $(boolify "${flag_fcst_restart}") = "FALSE" ]; then init_concentrations="true" else init_concentrations="false" @@ -669,9 +783,9 @@ fi # #----------------------------------------------------------------------- # -if [ "${RUN_ENVIR}" = "nco" ] && [ "${CPL_AQM}" = "TRUE" ]; then +if [ "${RUN_ENVIR}" = "nco" ] && [ $(boolify "${CPL_AQM}") = "TRUE" ]; then # create an intermediate symlink to RESTART - ln_vrfy -sf "${DATA}/RESTART" "${COMIN}/RESTART" + ln -sf "${DATA}/RESTART" "${COMIN}/RESTART" fi # #----------------------------------------------------------------------- @@ -681,7 +795,7 @@ fi # #----------------------------------------------------------------------- # -python3 $USHdir/create_nems_configure_file.py \ +python3 $USHdir/create_ufs_configure_file.py \ --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ --run-dir "${DATA}" export err=$? 
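# A note on the recurring pattern in these hunks: direct tests such as
# [ "${CPL_AQM}" = "TRUE" ] are replaced by [ $(boolify "${CPL_AQM}") = "TRUE" ]
# so that user-supplied booleans compare correctly regardless of capitalization.
# A minimal sketch of such a helper, assuming the real boolify in ush/bash_utils
# behaves along these lines:
boolify() {
  # Normalize a boolean-like string to "TRUE"/"FALSE"; echo anything
  # unrecognized back unchanged.
  local val
  val=$(echo "$1" | tr '[:lower:]' '[:upper:]')
  case "${val}" in
    TRUE|YES|1)  echo "TRUE" ;;
    FALSE|NO|0)  echo "FALSE" ;;
    *)           echo "$1" ;;
  esac
}
# With this, [ $(boolify "yes") = "TRUE" ] succeeds where [ "yes" = "TRUE" ]
# would not.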
@@ -728,17 +842,17 @@ POST_STEP # #----------------------------------------------------------------------- # -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then if [ "${RUN_ENVIR}" = "nco" ]; then if [ -d "${COMIN}/RESTART" ] && [ "$(ls -A ${DATA}/RESTART)" ]; then - rm_vrfy -rf "${COMIN}/RESTART" + rm -rf "${COMIN}/RESTART" fi if [ "$(ls -A ${DATA}/RESTART)" ]; then - cp_vrfy -Rp ${DATA}/RESTART ${COMIN} + cp -Rp ${DATA}/RESTART ${COMIN} fi fi - cp_vrfy -p ${DATA}/${AQM_RC_PRODUCT_FN} ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${AQM_RC_PRODUCT_FN} + cp -p ${DATA}/${AQM_RC_PRODUCT_FN} ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${AQM_RC_PRODUCT_FN} fhr_ct=0 fhr=0 @@ -748,8 +862,8 @@ if [ "${CPL_AQM}" = "TRUE" ]; then source_phy="${DATA}/phyf${fhr_ct}.nc" target_dyn="${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr_ct}.nc" target_phy="${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr_ct}.nc" - [ -f ${source_dyn} ] && cp_vrfy -p ${source_dyn} ${target_dyn} - [ -f ${source_phy} ] && cp_vrfy -p ${source_phy} ${target_phy} + [ -f ${source_dyn} ] && cp -p ${source_dyn} ${target_dyn} + [ -f ${source_phy} ] && cp -p ${source_phy} ${target_phy} (( fhr=fhr+1 )) done fi @@ -761,8 +875,8 @@ fi # #----------------------------------------------------------------------- # -if [ ${WRITE_DOPOST} = "TRUE" ]; then - +if [ $(boolify ${WRITE_DOPOST}) = "TRUE" ]; then + yyyymmdd=${PDY} hh=${cyc} fmn="00" @@ -770,9 +884,9 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then if [ "${RUN_ENVIR}" != "nco" ]; then export COMOUT="${DATA}/postprd" fi - mkdir_vrfy -p "${COMOUT}" + mkdir -p "${COMOUT}" - cd_vrfy ${COMOUT} + cd ${COMOUT} for fhr in $(seq -f "%03g" 0 ${FCST_LEN_HRS}); do @@ -788,7 +902,7 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then post_fn_suffix="GrbF${fhr_d}" post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.grib2" - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then fids=( "cmaq" ) else fids=( "prslev" "natlev" ) @@ -799,23 +913,21 @@ if [ ${WRITE_DOPOST} = "TRUE" ]; then post_orig_fn="${FID}.${post_fn_suffix}" post_renamed_fn="${NET}.${cycle}${dot_ensmem}.${fid}.${post_renamed_fn_suffix}" - mv_vrfy ${DATA}/${post_orig_fn} ${post_renamed_fn} + mv ${DATA}/${post_orig_fn} ${post_renamed_fn} if [ $RUN_ENVIR != "nco" ]; then basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M ) symlink_suffix="_${basetime}f${fhr}${post_mn}" - create_symlink_to_file target="${post_renamed_fn}" \ - symlink="${FID}${symlink_suffix}" \ - relative="TRUE" + create_symlink_to_file ${post_renamed_fn} ${FID}${symlink_suffix} TRUE fi # DBN alert - if [ $SENDDBN = "TRUE" ]; then + if [ "$SENDDBN" = "TRUE" ]; then $DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn} fi done - if [ "${CPL_AQM}" = "TRUE" ]; then - mv_vrfy ${DATA}/dynf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.nc - mv_vrfy ${DATA}/phyf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.nc + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then + mv ${DATA}/dynf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.dyn.f${fhr}.nc + mv ${DATA}/phyf${fhr}.nc ${COMIN}/${NET}.${cycle}${dot_ensmem}.phy.f${fhr}.nc fi done diff --git a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh index 5bbe61f530..1c09dc09c6 100755 --- a/scripts/exregional_run_met_genensprod_or_ensemblestat.sh +++ b/scripts/exregional_run_met_genensprod_or_ensemblestat.sh @@ -8,7 +8,11 @@ 
#----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -114,53 +118,6 @@ set_vx_params \ # #----------------------------------------------------------------------- # -# Set additional field-dependent verification parameters. -# -#----------------------------------------------------------------------- -# -if [ "${grid_or_point}" = "grid" ]; then - - case "${FIELDNAME_IN_MET_FILEDIR_NAMES}" in - "APCP01h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge2.54" - ;; - "APCP03h") - FIELD_THRESHOLDS="gt0.0, ge0.508, ge2.54, ge6.350" - ;; - "APCP06h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge6.350, ge12.700" - ;; - "APCP24h") - FIELD_THRESHOLDS="gt0.0, ge6.350, ge12.700, ge25.400" - ;; - "ASNOW06h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "ASNOW24h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "REFC") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - "RETOP") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - *) - print_err_msg_exit "\ -Verification parameters have not been defined for this field -(FIELDNAME_IN_MET_FILEDIR_NAMES): - FIELDNAME_IN_MET_FILEDIR_NAMES = \"${FIELDNAME_IN_MET_FILEDIR_NAMES}\"" - ;; - esac - -elif [ "${grid_or_point}" = "point" ]; then - - FIELD_THRESHOLDS="" - -fi -# -#----------------------------------------------------------------------- -# # Set paths and file templates for input to and output from the MET/ # METplus tool to be run as well as other file/directory parameters. # @@ -233,13 +190,10 @@ for (( i=0; i<${NUM_ENS_MEMBERS}; i++ )); do template="${FCST_SUBDIR_TEMPLATE}/${FCST_FN_TEMPLATE}" fi - slash_ensmem_subdir_or_null="/${ensmem_name}" if [ -z "${FCST_INPUT_FN_TEMPLATE}" ]; then FCST_INPUT_FN_TEMPLATE="$(eval echo ${template})" else - FCST_INPUT_FN_TEMPLATE="\ -${FCST_INPUT_FN_TEMPLATE}, -$(eval echo ${template})" + FCST_INPUT_FN_TEMPLATE="${FCST_INPUT_FN_TEMPLATE}, $(eval echo ${template})" fi done @@ -251,6 +205,16 @@ STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}" #----------------------------------------------------------------------- # # Set the array of forecast hours for which to run the MET/METplus tool. +# This is done by starting with the full list of forecast hours for which +# there is forecast output and then removing from that list any forecast +# hours for which there is no corresponding observation data. +# +# Note that strictly speaking, this does not need to be done if the MET/ +# METplus tool being called is GenEnsProd (because this tool only operates +# on forecasts), but we run the check anyway in this case in order to +# keep the code here simpler and because the output of GenEnsProd for +# forecast hours with missing observations will not be used anyway in +# downstream verification tasks. # #----------------------------------------------------------------------- # @@ -271,7 +235,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # @@ -318,16 +282,30 @@ fi # # First, set the base file names. 
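# The renaming below collapses the per-field METplus templates into a single
# template per tool: the template base name drops ${VAR}, while the rendered
# config and log file names keep the field. For a hypothetical GenEnsProd run
# on APCP01h the three names work out as:
#
#   metplus_config_tmpl_fn="GenEnsProd.conf"
#   metplus_config_fn="GenEnsProd_APCP01h.conf"
#   metplus_log_fn="metplus.log.GenEnsProd_APCP01h"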
# -metplus_config_tmpl_fn="${VAR}" -metplus_config_tmpl_fn="${MetplusToolName}_${metplus_config_tmpl_fn}" -metplus_config_fn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}" -metplus_log_fn="${metplus_config_fn}" +metplus_config_tmpl_bn="${MetplusToolName}" +metplus_config_bn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}" +metplus_log_bn="${metplus_config_bn}" # # Add prefixes and suffixes (extensions) to the base file names. # -metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf" -metplus_config_fn="${metplus_config_fn}.conf" -metplus_log_fn="metplus.log.${metplus_log_fn}" +metplus_config_tmpl_fn="${metplus_config_tmpl_bn}.conf" +metplus_config_fn="${metplus_config_bn}.conf" +metplus_log_fn="metplus.log.${metplus_log_bn}" +# +#----------------------------------------------------------------------- +# +# Load the yaml-like file containing the configuration for ensemble +# verification. +# +#----------------------------------------------------------------------- +# +det_or_ens="ens" +vx_config_fn="vx_config_${det_or_ens}.yaml" +vx_config_fp="${METPLUS_CONF}/${vx_config_fn}" +vx_config_dict=$(<"${vx_config_fp}") +# Indent each line of vx_config_dict so that it is aligned properly when +# included in the yaml-formatted variable "settings" below. +vx_config_dict=$( printf "%s\n" "${vx_config_dict}" | sed 's/^/ /' ) # #----------------------------------------------------------------------- # @@ -348,59 +326,65 @@ settings="\ # # MET/METplus information. # - 'metplus_tool_name': '${metplus_tool_name}' - 'MetplusToolName': '${MetplusToolName}' - 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' - 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' +'metplus_tool_name': '${metplus_tool_name}' +'MetplusToolName': '${MetplusToolName}' +'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' +'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' # # Date and forecast hour information. # - 'cdate': '$CDATE' - 'fhr_list': '${FHR_LIST}' +'cdate': '$CDATE' +'fhr_list': '${FHR_LIST}' # # Input and output directory/file information. # - 'metplus_config_fn': '${metplus_config_fn:-}' - 'metplus_log_fn': '${metplus_log_fn:-}' - 'obs_input_dir': '${OBS_INPUT_DIR:-}' - 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' - 'fcst_input_dir': '${FCST_INPUT_DIR:-}' - 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' - 'output_base': '${OUTPUT_BASE}' - 'output_dir': '${OUTPUT_DIR}' - 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' - 'staging_dir': '${STAGING_DIR}' - 'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' +'metplus_config_fn': '${metplus_config_fn:-}' +'metplus_log_fn': '${metplus_log_fn:-}' +'obs_input_dir': '${OBS_INPUT_DIR:-}' +'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' +'fcst_input_dir': '${FCST_INPUT_DIR:-}' +'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' +'output_base': '${OUTPUT_BASE}' +'output_dir': '${OUTPUT_DIR}' +'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' +'staging_dir': '${STAGING_DIR}' +'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' # # Ensemble and member-specific information. # - 'num_ens_members': '${NUM_ENS_MEMBERS}' - 'ensmem_name': '${ensmem_name:-}' - 'time_lag': '${time_lag:-}' +'num_ens_members': '${NUM_ENS_MEMBERS}' +'ensmem_name': '${ensmem_name:-}' +'time_lag': '${time_lag:-}' # # Field information. 
# - 'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' - 'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' - 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' - 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' - 'obtype': '${OBTYPE}' - 'accum_hh': '${ACCUM_HH:-}' - 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' +'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' +'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' +'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' +'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' +'obtype': '${OBTYPE}' +'accum_hh': '${ACCUM_HH:-}' +'accum_no_pad': '${ACCUM_NO_PAD:-}' +'metplus_templates_dir': '${METPLUS_CONF:-}' +'input_field_group': '${VAR:-}' +'input_level_fcst': '${FCST_LEVEL:-}' +'input_thresh_fcst': '${FCST_THRESH:-}' +# +# Verification configuration dictionary. +# +'vx_config_dict': +${vx_config_dict:-} " # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ - -v \ - --values-file "${tmpfile}" + --verbose \ + --values-file "${tmpfile}" \ + --search-path "/" err=$? rm $tmpfile diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh index 1fa249ecf8..abe5e3dd31 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_gridstat|task_run_vx_pointstat|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -118,60 +122,13 @@ set_vx_params \ #----------------------------------------------------------------------- # i="0" -if [ "${DO_ENSEMBLE}" = "TRUE" ]; then +if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then i=$( bc -l <<< "${ENSMEM_INDX}-1" ) fi time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" ) # #----------------------------------------------------------------------- # -# Set additional field-dependent verification parameters. 
-# -#----------------------------------------------------------------------- -# -if [ "${grid_or_point}" = "grid" ]; then - - case "${FIELDNAME_IN_MET_FILEDIR_NAMES}" in - "APCP01h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge1.27, ge2.54" - ;; - "APCP03h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge1.27, ge2.54, ge3.810, ge6.350" - ;; - "APCP06h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge1.27, ge2.54, ge3.810, ge6.350, ge8.890, ge12.700" - ;; - "APCP24h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge1.27, ge2.54, ge3.810, ge6.350, ge8.890, ge12.700, ge25.400" - ;; - "ASNOW06h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "ASNOW24h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "REFC") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - "RETOP") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - *) - print_err_msg_exit "\ -Verification parameters have not been defined for this field -(FIELDNAME_IN_MET_FILEDIR_NAMES): - FIELDNAME_IN_MET_FILEDIR_NAMES = \"${FIELDNAME_IN_MET_FILEDIR_NAMES}\"" - ;; - esac - -elif [ "${grid_or_point}" = "point" ]; then - - FIELD_THRESHOLDS="" - -fi -# -#----------------------------------------------------------------------- -# # Set paths and file templates for input to and output from the MET/ # METplus tool to be run as well as other file/directory parameters. # @@ -198,7 +155,7 @@ else # or, better, just remove this variale and code "/${ensmem_name}" where # slash_ensmem_subdir_or_null currently appears below. # - if [ "${DO_ENSEMBLE}" = "TRUE" ]; then + if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then slash_ensmem_subdir_or_null="/${ensmem_name}" else slash_ensmem_subdir_or_null="" @@ -252,6 +209,9 @@ STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}" #----------------------------------------------------------------------- # # Set the array of forecast hours for which to run the MET/METplus tool. +# This is done by starting with the full list of forecast hours for which +# there is forecast output and then removing from that list any forecast +# hours for which there is no corresponding observation data. # #----------------------------------------------------------------------- # @@ -272,7 +232,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # @@ -319,16 +279,30 @@ fi # # First, set the base file names. # -metplus_config_tmpl_fn="${VAR}" -metplus_config_tmpl_fn="${MetplusToolName}_${metplus_config_tmpl_fn}" -metplus_config_fn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}_${ensmem_name}" -metplus_log_fn="${metplus_config_fn}" +metplus_config_tmpl_bn="GridStat_or_PointStat" +metplus_config_bn="${MetplusToolName}_${FIELDNAME_IN_MET_FILEDIR_NAMES}_${ensmem_name}" +metplus_log_bn="${metplus_config_bn}" # # Add prefixes and suffixes (extensions) to the base file names. # -metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf" -metplus_config_fn="${metplus_config_fn}.conf" -metplus_log_fn="metplus.log.${metplus_log_fn}" +metplus_config_tmpl_fn="${metplus_config_tmpl_bn}.conf" +metplus_config_fn="${metplus_config_bn}.conf" +metplus_log_fn="metplus.log.${metplus_log_bn}" +# +#----------------------------------------------------------------------- +# +# Load the yaml-like file containing the configuration for deterministic +# verification. 
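# The FIELD_THRESHOLDS case statement removed above is superseded by the
# vx_config_det.yaml file loaded below from ${METPLUS_CONF}. The exact schema
# is defined by that file; a hypothetical entry standing in for the old
# "APCP01h" branch could look like:
#
#   APCP01h:
#     thresholds: 'gt0.0, ge0.254, ge0.508, ge1.27, ge2.54'
#
# The sed indent applied to vx_config_dict below is what lets these top-level
# keys nest under the 'vx_config_dict' key of the settings string, e.g.:
#
#   'vx_config_dict':
#     APCP01h:
#       thresholds: 'gt0.0, ge0.254, ge0.508, ge1.27, ge2.54'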
+# +#----------------------------------------------------------------------- +# +det_or_ens="det" +vx_config_fn="vx_config_${det_or_ens}.yaml" +vx_config_fp="${METPLUS_CONF}/${vx_config_fn}" +vx_config_dict=$(<"${vx_config_fp}") +# Indent each line of vx_config_dict so that it is aligned properly when +# included in the yaml-formatted variable "settings" below. +vx_config_dict=$( printf "%s\n" "${vx_config_dict}" | sed 's/^/ /' ) # #----------------------------------------------------------------------- # @@ -349,59 +323,65 @@ settings="\ # # MET/METplus information. # - 'metplus_tool_name': '${metplus_tool_name}' - 'MetplusToolName': '${MetplusToolName}' - 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' - 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' +'metplus_tool_name': '${metplus_tool_name}' +'MetplusToolName': '${MetplusToolName}' +'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' +'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' # # Date and forecast hour information. # - 'cdate': '$CDATE' - 'fhr_list': '${FHR_LIST}' +'cdate': '$CDATE' +'fhr_list': '${FHR_LIST}' # # Input and output directory/file information. # - 'metplus_config_fn': '${metplus_config_fn:-}' - 'metplus_log_fn': '${metplus_log_fn:-}' - 'obs_input_dir': '${OBS_INPUT_DIR:-}' - 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' - 'fcst_input_dir': '${FCST_INPUT_DIR:-}' - 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' - 'output_base': '${OUTPUT_BASE}' - 'output_dir': '${OUTPUT_DIR}' - 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' - 'staging_dir': '${STAGING_DIR}' - 'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' +'metplus_config_fn': '${metplus_config_fn:-}' +'metplus_log_fn': '${metplus_log_fn:-}' +'obs_input_dir': '${OBS_INPUT_DIR:-}' +'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' +'fcst_input_dir': '${FCST_INPUT_DIR:-}' +'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' +'output_base': '${OUTPUT_BASE}' +'output_dir': '${OUTPUT_DIR}' +'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' +'staging_dir': '${STAGING_DIR}' +'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' # # Ensemble and member-specific information. # - 'num_ens_members': '${NUM_ENS_MEMBERS}' - 'ensmem_name': '${ensmem_name:-}' - 'time_lag': '${time_lag:-}' +'num_ens_members': '${NUM_ENS_MEMBERS}' +'ensmem_name': '${ensmem_name:-}' +'time_lag': '${time_lag:-}' # # Field information. # - 'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' - 'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' - 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' - 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' - 'obtype': '${OBTYPE}' - 'accum_hh': '${ACCUM_HH:-}' - 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' +'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' +'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' +'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' +'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' +'obtype': '${OBTYPE}' +'accum_hh': '${ACCUM_HH:-}' +'accum_no_pad': '${ACCUM_NO_PAD:-}' +'metplus_templates_dir': '${METPLUS_CONF:-}' +'input_field_group': '${VAR:-}' +'input_level_fcst': '${FCST_LEVEL:-}' +'input_thresh_fcst': '${FCST_THRESH:-}' +# +# Verification configuration dictionary. 
+# +'vx_config_dict': +${vx_config_dict:-} " # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ - -v \ - --values-file "${tmpfile}" + --verbose \ + --values-file "${tmpfile}" \ + --search-path "/" err=$? rm $tmpfile @@ -415,7 +395,6 @@ $settings" print_err_msg_exit "${message_txt}" fi fi - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh index 067c24ec07..2c8378c128 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensmean.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid_mean|task_run_vx_enspoint_mean|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -103,53 +107,6 @@ set_vx_params \ # #----------------------------------------------------------------------- # -# Set additional field-dependent verification parameters. -# -#----------------------------------------------------------------------- -# -if [ "${grid_or_point}" = "grid" ]; then - - case "${FIELDNAME_IN_MET_FILEDIR_NAMES}" in - "APCP01h") - FIELD_THRESHOLDS="gt0.0, ge0.254, ge0.508, ge2.54" - ;; - "APCP03h") - FIELD_THRESHOLDS="gt0.0, ge0.508, ge2.54, ge6.350" - ;; - "APCP06h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge6.350, ge12.700" - ;; - "APCP24h") - FIELD_THRESHOLDS="gt0.0, ge6.350, ge12.700, ge25.400" - ;; - "ASNOW06h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "ASNOW24h") - FIELD_THRESHOLDS="gt0.0, ge2.54, ge5.08, ge10.16, ge20.32" - ;; - "REFC") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - "RETOP") - FIELD_THRESHOLDS="ge20, ge30, ge40, ge50" - ;; - *) - print_err_msg_exit "\ -Verification parameters have not been defined for this field -(FIELDNAME_IN_MET_FILEDIR_NAMES): - FIELDNAME_IN_MET_FILEDIR_NAMES = \"${FIELDNAME_IN_MET_FILEDIR_NAMES}\"" - ;; - esac - -elif [ "${grid_or_point}" = "point" ]; then - - FIELD_THRESHOLDS="" - -fi -# -#----------------------------------------------------------------------- -# # Set paths and file templates for input to and output from the MET/ # METplus tool to be run as well as other file/directory parameters. # @@ -201,6 +158,9 @@ STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}_ensmean" #----------------------------------------------------------------------- # # Set the array of forecast hours for which to run the MET/METplus tool. +# This is done by starting with the full list of forecast hours for which +# there is forecast output and then removing from that list any forecast +# hours for which there is no corresponding observation data. 
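# A sketch of the filtering idea behind set_vx_fhr_list (illustration only;
# the real function takes the arguments shown at its call sites and exports
# FHR_LIST): keep an hour only if its file exists, and fail once too many
# files are missing.
fhr_list=""
num_missing=0
for fhr in $(seq 0 ${FCST_LEN_HRS}); do
  fn=$(eval echo "${fn_template}")          # expand the template for this hour
  if [ -f "${base_dir}/${fn}" ]; then
    fhr_list="${fhr_list:+${fhr_list},}${fhr}"
  else
    num_missing=$((num_missing+1))
  fi
done
if [ ${num_missing} -gt ${num_missing_files_max} ]; then
  print_err_msg_exit "Too many missing files: ${num_missing}"
fi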
# #----------------------------------------------------------------------- # @@ -221,7 +181,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # @@ -277,16 +237,30 @@ fi # # First, set the base file names. # -metplus_config_tmpl_fn="${VAR}" -metplus_config_tmpl_fn="${MetplusToolName}_ensmean_${metplus_config_tmpl_fn}" -metplus_config_fn="${MetplusToolName}_ensmean_${FIELDNAME_IN_MET_FILEDIR_NAMES}" -metplus_log_fn="${metplus_config_fn}" +metplus_config_tmpl_bn="${MetplusToolName}_ensmean" +metplus_config_bn="${MetplusToolName}_ensmean_${FIELDNAME_IN_MET_FILEDIR_NAMES}" +metplus_log_bn="${metplus_config_bn}" # # Add prefixes and suffixes (extensions) to the base file names. # -metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf" -metplus_config_fn="${metplus_config_fn}.conf" -metplus_log_fn="metplus.log.${metplus_log_fn}" +metplus_config_tmpl_fn="${metplus_config_tmpl_bn}.conf" +metplus_config_fn="${metplus_config_bn}.conf" +metplus_log_fn="metplus.log.${metplus_log_bn}" +# +#----------------------------------------------------------------------- +# +# Load the yaml-like file containing the configuration for ensemble +# verification. +# +#----------------------------------------------------------------------- +# +det_or_ens="ens" +vx_config_fn="vx_config_${det_or_ens}.yaml" +vx_config_fp="${METPLUS_CONF}/${vx_config_fn}" +vx_config_dict=$(<"${vx_config_fp}") +# Indent each line of vx_config_dict so that it is aligned properly when +# included in the yaml-formatted variable "settings" below. +vx_config_dict=$( printf "%s\n" "${vx_config_dict}" | sed 's/^/ /' ) # #----------------------------------------------------------------------- # @@ -307,59 +281,65 @@ settings="\ # # MET/METplus information. # - 'metplus_tool_name': '${metplus_tool_name}' - 'MetplusToolName': '${MetplusToolName}' - 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' - 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' +'metplus_tool_name': '${metplus_tool_name}' +'MetplusToolName': '${MetplusToolName}' +'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' +'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' # # Date and forecast hour information. # - 'cdate': '$CDATE' - 'fhr_list': '${FHR_LIST}' +'cdate': '$CDATE' +'fhr_list': '${FHR_LIST}' # # Input and output directory/file information. # - 'metplus_config_fn': '${metplus_config_fn:-}' - 'metplus_log_fn': '${metplus_log_fn:-}' - 'obs_input_dir': '${OBS_INPUT_DIR:-}' - 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' - 'fcst_input_dir': '${FCST_INPUT_DIR:-}' - 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' - 'output_base': '${OUTPUT_BASE}' - 'output_dir': '${OUTPUT_DIR}' - 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' - 'staging_dir': '${STAGING_DIR}' - 'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' +'metplus_config_fn': '${metplus_config_fn:-}' +'metplus_log_fn': '${metplus_log_fn:-}' +'obs_input_dir': '${OBS_INPUT_DIR:-}' +'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' +'fcst_input_dir': '${FCST_INPUT_DIR:-}' +'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' +'output_base': '${OUTPUT_BASE}' +'output_dir': '${OUTPUT_DIR}' +'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' +'staging_dir': '${STAGING_DIR}' +'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' # # Ensemble and member-specific information. 
# - 'num_ens_members': '${NUM_ENS_MEMBERS}' - 'ensmem_name': '${ensmem_name:-}' - 'time_lag': '${time_lag:-}' +'num_ens_members': '${NUM_ENS_MEMBERS}' +'ensmem_name': '${ensmem_name:-}' +'time_lag': '${time_lag:-}' # # Field information. # - 'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' - 'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' - 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' - 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' - 'obtype': '${OBTYPE}' - 'accum_hh': '${ACCUM_HH:-}' - 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' +'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' +'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' +'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' +'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' +'obtype': '${OBTYPE}' +'accum_hh': '${ACCUM_HH:-}' +'accum_no_pad': '${ACCUM_NO_PAD:-}' +'metplus_templates_dir': '${METPLUS_CONF:-}' +'input_field_group': '${VAR:-}' +'input_level_fcst': '${FCST_LEVEL:-}' +'input_thresh_fcst': '${FCST_THRESH:-}' +# +# Verification configuration dictionary. +# +'vx_config_dict': +${vx_config_dict:-} " # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ - -v \ - --values-file "${tmpfile}" + --verbose \ + --values-file "${tmpfile}" \ + --search-path "/" err=$? rm $tmpfile @@ -373,7 +353,6 @@ $settings" print_err_msg_exit "${message_txt}" fi fi - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh index e042b68bfe..eae1850ad8 100755 --- a/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh +++ b/scripts/exregional_run_met_gridstat_or_pointstat_vx_ensprob.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_vx_ensgrid_prob|task_run_vx_enspoint_prob|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -153,6 +157,9 @@ STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}_ensprob" #----------------------------------------------------------------------- # # Set the array of forecast hours for which to run the MET/METplus tool. +# This is done by starting with the full list of forecast hours for which +# there is forecast output and then removing from that list any forecast +# hours for which there is no corresponding observation data. # #----------------------------------------------------------------------- # @@ -173,7 +180,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # @@ -229,16 +236,30 @@ fi # # First, set the base file names. 
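# For orientation across these vx scripts: GenEnsProd/EnsembleStat, the
# ensmean script above, and the ensprob script here all set det_or_ens="ens",
# while the per-member GridStat/PointStat script sets det_or_ens="det", so the
# configuration files they load split as:
#
#   det_or_ens="ens"  ->  ${METPLUS_CONF}/vx_config_ens.yaml
#   det_or_ens="det"  ->  ${METPLUS_CONF}/vx_config_det.yaml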
# -metplus_config_tmpl_fn="${VAR}" -metplus_config_tmpl_fn="${MetplusToolName}_ensprob_${metplus_config_tmpl_fn}" -metplus_config_fn="${MetplusToolName}_ensprob_${FIELDNAME_IN_MET_FILEDIR_NAMES}" -metplus_log_fn="${metplus_config_fn}" +metplus_config_tmpl_bn="${MetplusToolName}_ensprob" +metplus_config_bn="${MetplusToolName}_ensprob_${FIELDNAME_IN_MET_FILEDIR_NAMES}" +metplus_log_bn="${metplus_config_bn}" # # Add prefixes and suffixes (extensions) to the base file names. # -metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf" -metplus_config_fn="${metplus_config_fn}.conf" -metplus_log_fn="metplus.log.${metplus_log_fn}" +metplus_config_tmpl_fn="${metplus_config_tmpl_bn}.conf" +metplus_config_fn="${metplus_config_bn}.conf" +metplus_log_fn="metplus.log.${metplus_log_bn}" +# +#----------------------------------------------------------------------- +# +# Load the yaml-like file containing the configuration for ensemble +# verification. +# +#----------------------------------------------------------------------- +# +det_or_ens="ens" +vx_config_fn="vx_config_${det_or_ens}.yaml" +vx_config_fp="${METPLUS_CONF}/${vx_config_fn}" +vx_config_dict=$(<"${vx_config_fp}") +# Indent each line of vx_config_dict so that it is aligned properly when +# included in the yaml-formatted variable "settings" below. +vx_config_dict=$( printf "%s\n" "${vx_config_dict}" | sed 's/^/ /' ) # #----------------------------------------------------------------------- # @@ -259,59 +280,65 @@ settings="\ # # MET/METplus information. # - 'metplus_tool_name': '${metplus_tool_name}' - 'MetplusToolName': '${MetplusToolName}' - 'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' - 'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' +'metplus_tool_name': '${metplus_tool_name}' +'MetplusToolName': '${MetplusToolName}' +'METPLUS_TOOL_NAME': '${METPLUS_TOOL_NAME}' +'metplus_verbosity_level': '${METPLUS_VERBOSITY_LEVEL}' # # Date and forecast hour information. # - 'cdate': '$CDATE' - 'fhr_list': '${FHR_LIST}' +'cdate': '$CDATE' +'fhr_list': '${FHR_LIST}' # # Input and output directory/file information. # - 'metplus_config_fn': '${metplus_config_fn:-}' - 'metplus_log_fn': '${metplus_log_fn:-}' - 'obs_input_dir': '${OBS_INPUT_DIR:-}' - 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' - 'fcst_input_dir': '${FCST_INPUT_DIR:-}' - 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' - 'output_base': '${OUTPUT_BASE}' - 'output_dir': '${OUTPUT_DIR}' - 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' - 'staging_dir': '${STAGING_DIR}' - 'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' +'metplus_config_fn': '${metplus_config_fn:-}' +'metplus_log_fn': '${metplus_log_fn:-}' +'obs_input_dir': '${OBS_INPUT_DIR:-}' +'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' +'fcst_input_dir': '${FCST_INPUT_DIR:-}' +'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' +'output_base': '${OUTPUT_BASE}' +'output_dir': '${OUTPUT_DIR}' +'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' +'staging_dir': '${STAGING_DIR}' +'vx_fcst_model_name': '${VX_FCST_MODEL_NAME}' # # Ensemble and member-specific information. # - 'num_ens_members': '${NUM_ENS_MEMBERS}' - 'ensmem_name': '${ensmem_name:-}' - 'time_lag': '${time_lag:-}' +'num_ens_members': '${NUM_ENS_MEMBERS}' +'ensmem_name': '${ensmem_name:-}' +'time_lag': '${time_lag:-}' # # Field information. 
# - 'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' - 'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' - 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' - 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' - 'obtype': '${OBTYPE}' - 'accum_hh': '${ACCUM_HH:-}' - 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' +'fieldname_in_obs_input': '${FIELDNAME_IN_OBS_INPUT}' +'fieldname_in_fcst_input': '${FIELDNAME_IN_FCST_INPUT}' +'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' +'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' +'obtype': '${OBTYPE}' +'accum_hh': '${ACCUM_HH:-}' +'accum_no_pad': '${ACCUM_NO_PAD:-}' +'metplus_templates_dir': '${METPLUS_CONF:-}' +'input_field_group': '${VAR:-}' +'input_level_fcst': '${FCST_LEVEL:-}' +'input_thresh_fcst': '${FCST_THRESH:-}' +# +# Verification configuration dictionary. +# +'vx_config_dict': +${vx_config_dict:-} " # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ - -v \ - --values-file "${tmpfile}" + --verbose \ + --values-file "${tmpfile}" \ + --search-path "/" err=$? rm $tmpfile @@ -325,8 +352,6 @@ $settings" print_err_msg_exit "${message_txt}" fi fi - - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_pb2nc_obs.sh b/scripts/exregional_run_met_pb2nc_obs.sh index 10d1beba4d..7e79fb4efb 100755 --- a/scripts/exregional_run_met_pb2nc_obs.sh +++ b/scripts/exregional_run_met_pb2nc_obs.sh @@ -8,7 +8,10 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pb2nc_obs" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -140,7 +143,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # @@ -272,20 +275,17 @@ settings="\ 'obtype': '${OBTYPE}' 'accum_hh': '${ACCUM_HH:-}' 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' " # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ - -v \ - --values-file "${tmpfile}" + --verbose \ + --values-file "${tmpfile}" \ + --search-path "/" err=$? rm $tmpfile @@ -299,7 +299,6 @@ $settings" print_err_msg_exit "${message_txt}" fi fi - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_met_pcpcombine.sh b/scripts/exregional_run_met_pcpcombine.sh index 4a9222707a..026afb4eb2 100755 --- a/scripts/exregional_run_met_pcpcombine.sh +++ b/scripts/exregional_run_met_pcpcombine.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_run_met_pcpcombine|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -120,9 +124,9 @@ set_vx_params \ #----------------------------------------------------------------------- # time_lag="0" -if [ "${obs_or_fcst}" = "fcst" ]; then +if [ "${FCST_OR_OBS}" = "FCST" ]; then i="0" - if [ "${DO_ENSEMBLE}" = "TRUE" ]; then + if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then i=$( bc -l <<< "${ENSMEM_INDX}-1" ) fi time_lag=$( bc -l <<< "${ENS_TIME_LAG_HRS[$i]}*${SECS_PER_HOUR}" ) @@ -137,10 +141,9 @@ fi # vx_fcst_input_basedir=$( eval echo "${VX_FCST_INPUT_BASEDIR}" ) vx_output_basedir=$( eval echo "${VX_OUTPUT_BASEDIR}" ) -if [ "${obs_or_fcst}" = "fcst" ]; then +if [ "${FCST_OR_OBS}" = "FCST" ]; then ensmem_indx=$(printf "%0${VX_NDIGITS_ENSMEM_NAMES}d" $(( 10#${ENSMEM_INDX}))) ensmem_name="mem${ensmem_indx}" - if [ "${RUN_ENVIR}" = "nco" ]; then slash_cdate_or_null="" slash_ensmem_subdir_or_null="" @@ -158,7 +161,7 @@ if [ "${obs_or_fcst}" = "fcst" ]; then # or, better, just remove this variale and code "/${ensmem_name}" where # slash_ensmem_subdir_or_null currently appears below. # - if [ "${DO_ENSEMBLE}" = "TRUE" ]; then + if [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ]; then slash_ensmem_subdir_or_null="/${ensmem_name}" else slash_ensmem_subdir_or_null="" @@ -171,17 +174,7 @@ OBS_INPUT_FN_TEMPLATE="" FCST_INPUT_DIR="" FCST_INPUT_FN_TEMPLATE="" -if [ "${obs_or_fcst}" = "obs" ]; then - - OBS_INPUT_DIR="${OBS_DIR}" - OBS_INPUT_FN_TEMPLATE=$( eval echo ${OBS_CCPA_APCP_FN_TEMPLATE} ) - - OUTPUT_BASE="${vx_output_basedir}" - OUTPUT_DIR="${OUTPUT_BASE}/metprd/${MetplusToolName}_obs" - OUTPUT_FN_TEMPLATE=$( eval echo ${OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT} ) - STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}" - -elif [ "${obs_or_fcst}" = "fcst" ]; then +if [ "${FCST_OR_OBS}" = "FCST" ]; then FCST_INPUT_DIR="${vx_fcst_input_basedir}" FCST_INPUT_FN_TEMPLATE=$( eval echo ${FCST_SUBDIR_TEMPLATE:+${FCST_SUBDIR_TEMPLATE}/}${FCST_FN_TEMPLATE} ) @@ -191,22 +184,36 @@ elif [ "${obs_or_fcst}" = "fcst" ]; then OUTPUT_FN_TEMPLATE=$( eval echo ${FCST_FN_TEMPLATE_PCPCOMBINE_OUTPUT} ) STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}" +elif [ "${FCST_OR_OBS}" = "OBS" ]; then + + OBS_INPUT_DIR="${OBS_DIR}" + OBS_INPUT_FN_TEMPLATE=$( eval echo ${OBS_CCPA_APCP_FN_TEMPLATE} ) + + OUTPUT_BASE="${vx_output_basedir}" + OUTPUT_DIR="${OUTPUT_BASE}/metprd/${MetplusToolName}_obs" + OUTPUT_FN_TEMPLATE=$( eval echo ${OBS_CCPA_APCP_FN_TEMPLATE_PCPCOMBINE_OUTPUT} ) + STAGING_DIR="${OUTPUT_BASE}/stage/${FIELDNAME_IN_MET_FILEDIR_NAMES}" + fi # #----------------------------------------------------------------------- # # Set the array of forecast hours for which to run the MET/METplus tool. +# This is done by starting with the full list of forecast hours for which +# there is forecast output and then removing from that list any forecast +# hours for which there is no corresponding observation data (if combining +# observed APCP) or forecast data (if combining forecast APCP). 
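# The input-template assembly above relies on bash's ${VAR:+word} expansion,
# which yields "word" only when VAR is set and non-empty; the subdirectory and
# its trailing slash are therefore added only when a subdirectory template
# exists. For example (member name hypothetical):
FCST_SUBDIR_TEMPLATE=""
echo "${FCST_SUBDIR_TEMPLATE:+${FCST_SUBDIR_TEMPLATE}/}fcst.nc"   # -> fcst.nc
FCST_SUBDIR_TEMPLATE="mem001"
echo "${FCST_SUBDIR_TEMPLATE:+${FCST_SUBDIR_TEMPLATE}/}fcst.nc"   # -> mem001/fcst.nc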
# #----------------------------------------------------------------------- # -if [ "${obs_or_fcst}" = "obs" ]; then - base_dir="${OBS_INPUT_DIR}" - fn_template="${OBS_INPUT_FN_TEMPLATE}" - num_missing_files_max="${NUM_MISSING_OBS_FILES_MAX}" -elif [ "${obs_or_fcst}" = "fcst" ]; then +if [ "${FCST_OR_OBS}" = "FCST" ]; then base_dir="${FCST_INPUT_DIR}" fn_template="${FCST_INPUT_FN_TEMPLATE}" num_missing_files_max="${NUM_MISSING_FCST_FILES_MAX}" +elif [ "${FCST_OR_OBS}" = "OBS" ]; then + base_dir="${OBS_INPUT_DIR}" + fn_template="${OBS_INPUT_FN_TEMPLATE}" + num_missing_files_max="${NUM_MISSING_OBS_FILES_MAX}" fi set_vx_fhr_list \ @@ -226,7 +233,7 @@ set_vx_fhr_list \ # #----------------------------------------------------------------------- # -mkdir_vrfy -p "${OUTPUT_DIR}" +mkdir -p "${OUTPUT_DIR}" # #----------------------------------------------------------------------- # @@ -273,8 +280,8 @@ fi # # First, set the base file names. # -metplus_config_tmpl_fn="${MetplusToolName}_${obs_or_fcst}" -metplus_config_fn="${metplus_config_tmpl_fn}_${FIELDNAME_IN_MET_FILEDIR_NAMES}${ENSMEM_INDX:+_${ensmem_name}}" +metplus_config_tmpl_fn="${MetplusToolName}" +metplus_config_fn="${metplus_config_tmpl_fn}_$(echo_lowercase ${FCST_OR_OBS})_${FIELDNAME_IN_MET_FILEDIR_NAMES}${ENSMEM_INDX:+_${ensmem_name}}" metplus_log_fn="${metplus_config_fn}_$CDATE" # # If operating on observation files, append the cycle date to the name @@ -283,13 +290,13 @@ metplus_log_fn="${metplus_config_fn}_$CDATE" # necessary to associate the configuration file with the cycle for which # it is used). # -if [ "${obs_or_fcst}" = "obs" ]; then +if [ "${FCST_OR_OBS}" = "OBS" ]; then metplus_config_fn="${metplus_log_fn}" fi # # Add prefixes and suffixes (extensions) to the base file names. # -metplus_config_tmpl_fn="${metplus_config_tmpl_fn}_${field}.conf" +metplus_config_tmpl_fn="${metplus_config_tmpl_fn}.conf" metplus_config_fn="${metplus_config_fn}.conf" metplus_log_fn="metplus.log.${metplus_log_fn}" # @@ -326,10 +333,8 @@ settings="\ # 'metplus_config_fn': '${metplus_config_fn:-}' 'metplus_log_fn': '${metplus_log_fn:-}' - 'obs_input_dir': '${OBS_INPUT_DIR:-}' - 'obs_input_fn_template': '${OBS_INPUT_FN_TEMPLATE:-}' - 'fcst_input_dir': '${FCST_INPUT_DIR:-}' - 'fcst_input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-}' + 'input_dir': '${FCST_INPUT_DIR:-${OBS_INPUT_DIR}}' + 'input_fn_template': '${FCST_INPUT_FN_TEMPLATE:-${OBS_INPUT_FN_TEMPLATE}}' 'output_base': '${OUTPUT_BASE}' 'output_dir': '${OUTPUT_DIR}' 'output_fn_template': '${OUTPUT_FN_TEMPLATE:-}' @@ -349,21 +354,24 @@ settings="\ 'fieldname_in_met_output': '${FIELDNAME_IN_MET_OUTPUT}' 'fieldname_in_met_filedir_names': '${FIELDNAME_IN_MET_FILEDIR_NAMES}' 'obtype': '${OBTYPE}' + 'FCST_OR_OBS': '${FCST_OR_OBS}' 'accum_hh': '${ACCUM_HH:-}' 'accum_no_pad': '${ACCUM_NO_PAD:-}' - 'field_thresholds': '${FIELD_THRESHOLDS:-}' + 'metplus_templates_dir': '${METPLUS_CONF:-}' + 'input_field_group': '${VAR:-}' + 'input_level_fcst': '${FCST_LEVEL:-}' + 'input_thresh_fcst': '${FCST_THRESH:-}' " + # Render the template to create a METplus configuration file tmpfile=$( $READLINK -f "$(mktemp ./met_plus_settings.XXXXXX.yaml)") -cat > $tmpfile << EOF -$settings -EOF - +printf "%s" "$settings" > "$tmpfile" uw template render \ -i ${metplus_config_tmpl_fp} \ -o ${metplus_config_fp} \ - -v \ - --values-file "${tmpfile}" + --verbose \ + --values-file "${tmpfile}" \ + --search-path "/" err=$? 
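# Two small changes recur in the rendering step above: the settings string is
# now written with printf "%s" (which, unlike the old cat-heredoc, does not
# append a trailing newline), and uw template render is called with its long
# options plus --search-path "/" (presumably so templates can be resolved from
# absolute paths). A standalone illustration of the same step, with
# hypothetical file names:
printf "%s" "$settings" > values.yaml
uw template render \
  -i PcpCombine.conf \
  -o PcpCombine_fcst_APCP01h_mem001.conf \
  --verbose \
  --values-file values.yaml \
  --search-path "/"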
rm $tmpfile diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 60f87c3eaf..3f0ca93df9 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -1,5 +1,62 @@ #!/usr/bin/env bash +# +#----------------------------------------------------------------------- +# +# The ex-script that runs UPP. +# +# Run-time environment variables: +# +# CDATE +# COMOUT +# DATA_FHR +# DBNROOT +# ENSMEM_INDX +# GLOBAL_VAR_DEFNS_FP +# NET +# PDY +# REDIRECT_OUT_ERR +# SENDDBN +# +# Experiment variables +# +# user: +# EXECdir +# MACHINE +# PARMdir +# RUN_ENVIR +# USHdir +# +# platform: +# PRE_TASK_CMDS +# RUN_CMD_POST +# +# workflow: +# VERBOSE +# +# task_run_fcst: +# DT_ATMOS +# +# task_run_post: +# CUSTOM_POST_CONFIG_FP +# KMP_AFFINITY_RUN_POST +# OMP_NUM_THREADS_RUN_POST +# OMP_STACKSIZE_RUN_POST +# NUMX +# POST_OUTPUT_DOMAIN_NAME +# SUB_HOURLY_POST +# USE_CUSTOM_POST_CONFIG_FILE +# +# global: +# CRTM_DIR +# USE_CRTM +# +# cpl_aqm_parm: +# CPL_AQM +# +#----------------------------------------------------------------------- +# + # #----------------------------------------------------------------------- # @@ -8,7 +65,10 @@ #----------------------------------------------------------------------- # . $USHdir/source_util_funcs.sh -source_config_for_task "task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow global cpl_aqm_parm \ + task_run_fcst task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -80,9 +140,9 @@ fi # #----------------------------------------------------------------------- # -rm_vrfy -f fort.* -cp_vrfy ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat -if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then +rm -f fort.* +cp ${PARMdir}/upp/nam_micro_lookup.dat ./eta_micro_lookup.dat +if [ $(boolify ${USE_CUSTOM_POST_CONFIG_FILE}) = "TRUE" ]; then post_config_fp="${CUSTOM_POST_CONFIG_FP}" print_info_msg " ==================================================================== @@ -92,7 +152,7 @@ to the temporary work directory (DATA_FHR): DATA_FHR = \"${DATA_FHR}\" ====================================================================" else - if [ "${CPL_AQM}" = "TRUE" ]; then + if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then post_config_fp="${PARMdir}/upp/postxconfig-NT-AQM.txt" else post_config_fp="${PARMdir}/upp/postxconfig-NT-fv3lam.txt" @@ -105,18 +165,18 @@ temporary work directory (DATA_FHR): DATA_FHR = \"${DATA_FHR}\" ====================================================================" fi -cp_vrfy ${post_config_fp} ./postxconfig-NT.txt -cp_vrfy ${PARMdir}/upp/params_grib2_tbl_new . -if [ ${USE_CRTM} = "TRUE" ]; then - cp_vrfy ${CRTM_DIR}/Nalli.IRwater.EmisCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/FAST*.bin ./ - cp_vrfy ${CRTM_DIR}/NPOESS.IRland.EmisCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/NPOESS.IRsnow.EmisCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/NPOESS.IRice.EmisCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/AerosolCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/CloudCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/*.SpcCoeff.bin ./ - cp_vrfy ${CRTM_DIR}/*.TauCoeff.bin ./ +cp ${post_config_fp} ./postxconfig-NT.txt +cp ${PARMdir}/upp/params_grib2_tbl_new . 
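# Context for the many cp_vrfy/mv_vrfy/rm_vrfy/cd_vrfy/mkdir_vrfy/ln_vrfy to
# cp/mv/rm/cd/mkdir/ln replacements in these hunks: the *_vrfy wrappers were
# thin guards that aborted on failure, roughly (a sketch, not the exact
# ush/bash_utils code):
cp_vrfy() { cp "$@" || print_err_msg_exit "Command failed: cp $*"; }
# With the scripts now running under error-aborting shell options (compare the
# new preamble "{ save_shell_opts; set -xue; }" in exsrw_aqm_ics.sh below),
# the plain commands suffice and the wrappers are retired.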
+if [ $(boolify ${USE_CRTM}) = "TRUE" ]; then + cp ${CRTM_DIR}/Nalli.IRwater.EmisCoeff.bin ./ + cp ${CRTM_DIR}/FAST*.bin ./ + cp ${CRTM_DIR}/NPOESS.IRland.EmisCoeff.bin ./ + cp ${CRTM_DIR}/NPOESS.IRsnow.EmisCoeff.bin ./ + cp ${CRTM_DIR}/NPOESS.IRice.EmisCoeff.bin ./ + cp ${CRTM_DIR}/AerosolCoeff.bin ./ + cp ${CRTM_DIR}/CloudCoeff.bin ./ + cp ${CRTM_DIR}/*.SpcCoeff.bin ./ + cp ${CRTM_DIR}/*.TauCoeff.bin ./ print_info_msg " ==================================================================== Copying the external CRTM fix files from CRTM_DIR to the temporary @@ -155,7 +215,7 @@ hh=${cyc} # must be set to a null string. # mnts_secs_str="" -if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then +if [ $(boolify "${SUB_HOURLY_POST}") = "TRUE" ]; then if [ ${fhr}${fmn} = "00000" ]; then mnts_secs_str=":"$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${DT_ATMOS} seconds" "+%M:%S" ) else @@ -185,7 +245,7 @@ post_mn=${post_time:10:2} # # Create the input namelist file to the post-processor executable. # -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then post_itag_add="aqf_on=.true.," else post_itag_add="" @@ -270,10 +330,10 @@ post_renamed_fn_suffix="f${fhr}${post_mn_or_null}.${POST_OUTPUT_DOMAIN_NAME}.gri # generates (i.e. "...prslev..." and "...natlev..." files) and move, # rename, and create symlinks to them. # -cd_vrfy "${COMOUT}" +cd "${COMOUT}" basetime=$( $DATE_UTIL --date "$yyyymmdd $hh" +%y%j%H%M ) symlink_suffix="${dot_ensmem}.${basetime}f${fhr}${post_mn}" -if [ "${CPL_AQM}" = "TRUE" ]; then +if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then fids=( "cmaq" ) else fids=( "prslev" "natlev" ) @@ -282,19 +342,17 @@ for fid in "${fids[@]}"; do FID=$(echo_uppercase $fid) post_orig_fn="${FID}.${post_fn_suffix}" post_renamed_fn="${NET}.${cycle}${dot_ensmem}.${fid}.${post_renamed_fn_suffix}" - mv_vrfy ${DATA_FHR}/${post_orig_fn} ${post_renamed_fn} + mv ${DATA_FHR}/${post_orig_fn} ${post_renamed_fn} if [ $RUN_ENVIR != "nco" ]; then - create_symlink_to_file target="${post_renamed_fn}" \ - symlink="${FID}${symlink_suffix}" \ - relative="TRUE" + create_symlink_to_file ${post_renamed_fn} ${FID}${symlink_suffix} TRUE fi # DBN alert - if [ $SENDDBN = "TRUE" ]; then + if [ "$SENDDBN" = "TRUE" ]; then $DBNROOT/bin/dbn_alert MODEL rrfs_post ${job} ${COMOUT}/${post_renamed_fn} fi done -rm_vrfy -rf ${DATA_FHR} +rm -rf ${DATA_FHR} # #----------------------------------------------------------------------- diff --git a/scripts/exregional_run_prdgen.sh b/scripts/exregional_run_prdgen.sh index 8fc72dff1c..5baa779821 100755 --- a/scripts/exregional_run_prdgen.sh +++ b/scripts/exregional_run_prdgen.sh @@ -8,7 +8,11 @@ #----------------------------------------------------------------------- # . 
$USHdir/source_util_funcs.sh -source_config_for_task "task_run_prdgen|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post task_run_prdgen ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -166,7 +170,7 @@ net4=$(echo ${NET:0:4} | tr '[:upper:]' '[:lower:]') for leveltype in prslev natlev ififip testbed do if [ -f ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ]; then - ln_vrfy -sf --relative ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 + ln -sf --relative ${COMOUT}/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 wgrib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 -s > ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2.idx fi done @@ -175,7 +179,7 @@ done # Remap to additional output grids if requested #----------------------------------------------- -if [ ${DO_PARALLEL_PRDGEN} == "TRUE" ]; then +if [ $(boolify ${DO_PARALLEL_PRDGEN}) = "TRUE" ]; then # # parallel run wgrib2 for product generation # @@ -184,7 +188,7 @@ if [ ${PREDEF_GRID_NAME} = "RRFS_NA_3km" ]; then DATA=$COMOUT DATAprdgen=$DATA/prdgen_${fhr} -mkdir_vrfy $DATAprdgen +mkdir $DATAprdgen wgrib2 ${COMOUT}/${NET}.${cycle}.prslev.f${fhr}.grib2 >& $DATAprdgen/prslevf${fhr}.txt @@ -223,7 +227,7 @@ for domain in ${domains[@]} do for task in $(seq ${tasks[count]}) do - mkdir_vrfy -p $DATAprdgen/prdgen_${domain}_${task} + mkdir -p $DATAprdgen/prdgen_${domain}_${task} echo "$SCRIPTSdir/exregional_run_prdgen_subpiece.sh $fhr $cyc $task $domain ${DATAprdgen} ${COMOUT} &" >> $DATAprdgen/poescript_${fhr} done count=$count+1 @@ -269,7 +273,7 @@ else # if [ ${#ADDNL_OUTPUT_GRIDS[@]} -gt 0 ]; then - cd_vrfy ${COMOUT} + cd ${COMOUT} grid_specs_130="lambert:265:25.000000 233.862000:451:13545.000000 16.281000:337:13545.000000" grid_specs_200="lambert:253:50.000000 285.720000:108:16232.000000 16.201000:94:16232.000000" @@ -289,7 +293,7 @@ if [ ${#ADDNL_OUTPUT_GRIDS[@]} -gt 0 ]; then eval grid_specs=\$grid_specs_${grid} subdir=${COMOUT}/${grid}_grid - mkdir_vrfy -p ${subdir}/${fhr} + mkdir -p ${subdir}/${fhr} bg_remap=${subdir}/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 # Interpolate fields to new grid @@ -317,11 +321,11 @@ if [ ${#ADDNL_OUTPUT_GRIDS[@]} -gt 0 ]; then rm -f ${subdir}/${fhr}/tmp_${grid}.grib2 # Save to com directory - mkdir_vrfy -p ${COMOUT}/${grid}_grid - cp_vrfy ${bg_remap} ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 + mkdir -p ${COMOUT}/${grid}_grid + cp ${bg_remap} ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 if [[ -f ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ]]; then - ln_vrfy -fs --relative ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 + ln -fs --relative ${COMOUT}/${grid}_grid/${NET}.${cycle}${dot_ensmem}.${leveltype}.f${fhr}.${POST_OUTPUT_DOMAIN_NAME}.grib2 
${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 wgrib2 ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2 -s > ${COMOUT}/${net4}.${cycle}.${leveltype}.f${fhr}.${gridname}grib2.idx fi @@ -331,7 +335,7 @@ fi fi # block for parallel or series wgrib2 runs. -rm_vrfy -rf ${DATA_FHR} +rm -rf ${DATA_FHR} # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_aqm_ics.sh b/scripts/exsrw_aqm_ics.sh similarity index 56% rename from scripts/exregional_aqm_ics.sh rename to scripts/exsrw_aqm_ics.sh index 676cc4ed90..4fd040e597 100755 --- a/scripts/exregional_aqm_ics.sh +++ b/scripts/exsrw_aqm_ics.sh @@ -7,8 +7,11 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh -source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP} +. ${USHsrw}/source_util_funcs.sh +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -17,7 +20,7 @@ source_config_for_task "task_aqm_ics" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -53,33 +56,56 @@ tial or boundary condition files for the FV3 will be generated. # #----------------------------------------------------------------------- # -rst_dir=${PREV_CYCLE_DIR}/RESTART -rst_file=fv_tracer.res.tile1.nc -fv_tracer_file=${rst_dir}/${PDY}.${cyc}0000.${rst_file} -print_info_msg " - Looking for tracer restart file: \"${fv_tracer_file}\"" -if [ ! -r ${fv_tracer_file} ]; then - if [ -r ${rst_dir}/coupler.res ]; then - rst_info=( $( tail -n 1 ${rst_dir}/coupler.res ) ) - rst_date=$( printf "%04d%02d%02d%02d" ${rst_info[@]:0:4} ) - print_info_msg " - Tracer file not found. Checking available restart date: - requested date: \"${PDY}${cyc}\" - available date: \"${rst_date}\"" - if [ "${rst_date}" = "${PDY}${cyc}" ] ; then - fv_tracer_file=${rst_dir}/${rst_file} - if [ -r ${fv_tracer_file} ]; then - print_info_msg " - Tracer file found: \"${fv_tracer_file}\"" - else - message_txt="No suitable tracer restart file found." 
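#
# Note on the $(boolify ...) = "TRUE" comparisons introduced throughout
# these hunks: boolify is a helper from the SRW bash utilities brought in
# by source_util_funcs.sh; it normalizes the various accepted boolean
# spellings before the string compare. A minimal sketch of the behavior
# it implies (the real definition lives under ush/bash_utils and may differ):
function boolify() {
  local val
  val=$(echo "$1" | tr '[:lower:]' '[:upper:]')   # case-insensitive compare
  case "${val}" in
    TRUE|YES|Y|T) echo "TRUE" ;;
    FALSE|NO|N|F) echo "FALSE" ;;
    *) echo "${val}" ;;                           # pass through unchanged
  esac
}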
-      if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2"]; then
-        err_exit "${message_txt}"
-      else
-        print_err_msg_exit "${message_txt}"
-      fi
-    fi
+rst_dir="${PREV_CYCLE_DIR}/RESTART"
+rst_file="fv_tracer.res.tile1.nc"
+rst_file_with_date="${PDY}.${cyc}0000.${rst_file}"
+if [ -e "${rst_dir}/${rst_file_with_date}" ]; then
+  fv_tracer_file="${rst_dir}/${rst_file_with_date}"
+elif [ -e "${rst_dir}/${rst_file}" ]; then
+  fv_tracer_file="${rst_dir}/${rst_file}"
+else
+  message_txt="Tracer restart file: \"${rst_dir}/${rst_file_with_date}\" is NOT found"
+  err_exit "${message_txt}"
+  print_err_msg_exit "${message_txt}"
+fi
+print_info_msg "Tracer restart file: \"${fv_tracer_file}\""
+
+cplr_file="coupler.res"
+cplr_file_with_date="${PDY}.${cyc}0000.${cplr_file}"
+if [ -e "${rst_dir}/${cplr_file_with_date}" ]; then
+  coupler_file="${rst_dir}/${cplr_file_with_date}"
+elif [ -e "${rst_dir}/${cplr_file}" ]; then
+  coupler_file="${rst_dir}/${cplr_file}"
+else
+  message_txt="Coupler file: \"${rst_dir}/${cplr_file_with_date}\" is NOT found"
+  err_exit "${message_txt}"
+  print_err_msg_exit "${message_txt}"
+fi
+print_info_msg "Coupler file: \"${coupler_file}\""
+
+if [ -r ${coupler_file} ]; then
+  rst_info=( $( tail -n 1 ${coupler_file} ) )
+  # Remove leading zeros from ${rst_info[1]}
+  month="${rst_info[1]#"${rst_info[1]%%[!0]*}"}"
+  # Remove leading zeros from ${rst_info[2]}
+  day="${rst_info[2]#"${rst_info[2]%%[!0]*}"}"
+  # Reassemble the date with zero padding, forcing base-10 arithmetic
+  rst_date=$(printf "%04d%02d%02d%02d" ${rst_info[0]} $((10#$month)) $((10#$day)) ${rst_info[3]})
+  if [ "${rst_date}" = "${PDY}${cyc}" ]; then
+    if [ -r ${fv_tracer_file} ]; then
+      print_info_msg "Tracer restart file is for ${PDY}${cyc}"
+    else
+      message_txt="Tracer restart file \"${fv_tracer_file}\" is NOT readable."
+      err_exit "${message_txt}"
+      print_err_msg_exit "${message_txt}"
     fi
+  else
+    message_txt="Tracer restart file is NOT for ${PDY}${cyc}.
+Checking available restart date:
+  requested date: \"${PDY}${cyc}\"
+  available date: \"${rst_date}\""
+    err_exit "${message_txt}"
+    print_err_msg_exit "${message_txt}"
   fi
 fi
 #
@@ -88,46 +114,41 @@ fi
 # Add air quality tracer variables from previous cycle's restart output
 # to atmosphere's initial condition file according to the steps below:
 #
-# a. Python script to manipulate the files (see comments inside for
-#    details)
+# a. Python script to manipulate the files (see comments inside for details)
 # b. Remove checksum attribute to prevent overflow
-#
 # c. Rename resulting file as the expected atmospheric IC file
 #
 #-----------------------------------------------------------------------
 #
-gfs_ic_file=${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc
-wrk_ic_file=${DATA}/gfs.nc
+gfs_ic_fn="${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc"
+gfs_ic_fp="${DATA_SHARE}/${gfs_ic_fn}"
+wrk_ic_fp="${DATA}/gfs.nc"
 print_info_msg "
 Adding air quality tracers to atmospheric initial condition file:
   tracer file: \"${fv_tracer_file}\"
-  FV3 IC file: \"${gfs_ic_file}\""
+  FV3 IC file: \"${gfs_ic_fp}\""
 
-cp_vrfy ${gfs_ic_file} ${wrk_ic_file}
-python3 ${HOMEdir}/sorc/AQM-utils/python_utils/add_aqm_ics.py --fv_tracer_file "${fv_tracer_file}" --wrk_ic_file "${wrk_ic_file}"
+cp -p ${gfs_ic_fp} ${wrk_ic_fp}
+${USHsrw}/aqm_utils_python/add_aqm_ics.py --fv_tracer_file "${fv_tracer_file}" --wrk_ic_file "${wrk_ic_fp}"
 export err=$?
 if [ $err -ne 0 ]; then
   message_txt="Call to python script \"add_aqm_ics.py\" failed."
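#
# The leading-zero stripping in the coupler.res parsing above exists
# because fields such as the month come back as "08", which $(( )) and
# printf %d would otherwise try to read as (invalid) octal. The idiom in
# isolation, with a hypothetical value:
month="08"
month="${month#"${month%%[!0]*}"}"   # strip leading zeros -> "8"
printf "%02d\n" $((10#$month))       # force base 10 and re-pad -> "08"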
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi ncatted -a checksum,,d,s, tmp1.nc export err=$? if [ $err -ne 0 ]; then message_txt="Call to NCATTED returned with nonzero exit code." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi -cp_vrfy tmp1.nc ${gfs_ic_file} +mv tmp1.nc ${gfs_ic_fn} + +cp -p ${gfs_ic_fn} ${COMOUT} unset fv_tracer_file unset wrk_ic_file @@ -138,20 +159,17 @@ unset wrk_ic_file # #----------------------------------------------------------------------- # - print_info_msg " +print_info_msg " ======================================================================== -Successfully added air quality tracers to atmospheric initial condition -file!!! +Successfully added air quality tracers to atmospheric IC file!!! Exiting script: \"${scrfunc_fn}\" In directory: \"${scrfunc_dir}\" ========================================================================" - # #----------------------------------------------------------------------- # -# Restore the shell options saved at the beginning of this script/func- -# tion. +# Restore the shell options saved at the beginning of this script/function. # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_aqm_lbcs.sh b/scripts/exsrw_aqm_lbcs.sh similarity index 65% rename from scripts/exregional_aqm_lbcs.sh rename to scripts/exsrw_aqm_lbcs.sh index 09a33d40a2..7b3058ef34 100755 --- a/scripts/exregional_aqm_lbcs.sh +++ b/scripts/exsrw_aqm_lbcs.sh @@ -7,8 +7,12 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh -source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aqm_parm|task_aqm_lbcs" ${GLOBAL_VAR_DEFNS_FP} +. ${USHsrw}/source_util_funcs.sh +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_get_extrn_lbcs task_make_lbcs task_make_orog ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -17,7 +21,7 @@ source_config_for_task "task_get_extrn_lbcs|task_make_orog|task_make_lbcs|cpl_aq # #----------------------------------------------------------------------- # -{ save_shell_opts; . 
$USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -77,10 +81,10 @@ fi # #----------------------------------------------------------------------- # -CDATE_MOD=$( $DATE_UTIL --utc --date "${PDY} ${cyc} UTC - ${EXTRN_MDL_LBCS_OFFSET_HRS} hours" "+%Y%m%d%H" ) -yyyymmdd=${CDATE_MOD:0:8} -mm="${CDATE_MOD:4:2}" -hh="${CDATE_MOD:8:2}" +CDATE_MOD=`$NDATE -${EXTRN_MDL_LBCS_OFFSET_HRS} ${PDY}${cyc}` +YYYYMMDD="${CDATE_MOD:0:8}" +MM="${CDATE_MOD:4:2}" +HH="${CDATE_MOD:8:2}" if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then cyc_mod=$(( ${cyc} - ${DATE_FIRST_CYCL:8:2} )) @@ -92,38 +96,40 @@ for i_lbc in $(seq ${LBC_SPEC_INTVL_HRS} ${LBC_SPEC_INTVL_HRS} ${FCST_LEN_HRS} ) LBC_SPEC_FCST_HRS+=("$i_lbc") done -if [ ${DO_AQM_CHEM_LBCS} = "TRUE" ]; then - - ext_lbcs_file=${AQM_LBCS_FILES} - chem_lbcs_fn=${ext_lbcs_file///${mm}} +# Copy lbcs files from DATA_SHARE +aqm_lbcs_fn_prefix="${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f" +for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do + fhr=$( printf "%03d" "${hr}" ) + aqm_lbcs_fn="${aqm_lbcs_fn_prefix}${fhr}.nc" + cp -p "${DATA_SHARE}/${aqm_lbcs_fn}" ${DATA} +done - chem_lbcs_fp=${DCOMINchem_lbcs}/${chem_lbcs_fn} +if [ $(boolify "${DO_AQM_CHEM_LBCS}") = "TRUE" ]; then + ext_lbcs_file="${AQM_LBCS_FILES}" + chem_lbcs_fn=${ext_lbcs_file///${MM}} + chem_lbcs_fp="${FIXaqm}/chemlbc/${chem_lbcs_fn}" if [ -f ${chem_lbcs_fp} ]; then #Copy the boundary condition file to the current location - cp_vrfy ${chem_lbcs_fp} . + cp -p ${chem_lbcs_fp} . else message_txt="The chemical LBC files do not exist: CHEM_BOUNDARY_CONDITION_FILE = \"${chem_lbcs_fp}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do fhr=$( printf "%03d" "${hr}" ) - if [ -r ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fhr}.nc ]; then - ncks -A ${chem_lbcs_fn} ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fhr}.nc + aqm_lbcs_fn="${aqm_lbcs_fn_prefix}${fhr}.nc" + if [ -r "${aqm_lbcs_fn}" ]; then + ncks -A ${chem_lbcs_fn} ${aqm_lbcs_fn} export err=$? if [ $err -ne 0 ]; then message_txt="Call to NCKS returned with nonzero exit code." 
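#
# The $NDATE calls that replace the GNU-date arithmetic in this hunk come
# from NCEP prod_util: given a signed hour offset and a YYYYMMDDHH stamp,
# ndate prints the shifted stamp. A sketch with assumed values (not taken
# from any real configuration), alongside the equivalent form it replaces:
PDY=20240115; cyc=06; EXTRN_MDL_LBCS_OFFSET_HRS=6
CDATE_MOD=$(${NDATE} -${EXTRN_MDL_LBCS_OFFSET_HRS} ${PDY}${cyc})   # -> 2024011500
$DATE_UTIL --utc --date "${PDY} ${cyc} UTC - ${EXTRN_MDL_LBCS_OFFSET_HRS} hours" "+%Y%m%d%H"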
-      if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
-        err_exit "${message_txt}"
-      else
-        print_err_msg_exit "${message_txt}"
-      fi
+        err_exit "${message_txt}"
+        print_err_msg_exit "${message_txt}"
       fi
+      cp -p ${aqm_lbcs_fn} "${aqm_lbcs_fn}_chemlbc"
     fi
   done
@@ -139,54 +145,49 @@ fi
 #
 #-----------------------------------------------------------------------
 #
-if [ ${DO_AQM_GEFS_LBCS} = "TRUE" ]; then
-
-  AQM_GEFS_FILE_CYC=${AQM_GEFS_FILE_CYC:-"${hh}"}
+if [ $(boolify "${DO_AQM_GEFS_LBCS}") = "TRUE" ]; then
+  AQM_GEFS_FILE_CYC=${AQM_GEFS_FILE_CYC:-"${HH}"}
   AQM_GEFS_FILE_CYC=$( printf "%02d" "${AQM_GEFS_FILE_CYC}" )
-  GEFS_CYC_DIFF=$(( cyc - AQM_GEFS_FILE_CYC ))
-  if [ "${GEFS_CYC_DIFF}" -lt "0" ]; then
-    TSTEPDIFF=$( printf "%02d" $(( 24 + ${GEFS_CYC_DIFF} )) )
+  gefs_cyc_diff=$(( cyc - AQM_GEFS_FILE_CYC ))
+  if [ "${YYYYMMDD}" = "${PDY}" ]; then
+    tstepdiff=$( printf "%02d" ${gefs_cyc_diff} )
   else
-    TSTEPDIFF=$( printf "%02d" ${GEFS_CYC_DIFF} )
+    tstepdiff=$( printf "%02d" $(( 24 + ${gefs_cyc_diff} )) )
   fi
 
-  AQM_MOFILE_FN="${AQM_GEFS_FILE_PREFIX}.t${AQM_GEFS_FILE_CYC}z.atmf"
-  if [ "${DO_REAL_TIME}" = "TRUE" ]; then
-    AQM_MOFILE_FP="${COMINgefs}/gefs.${yyyymmdd}/${AQM_GEFS_FILE_CYC}/chem/sfcsig/${AQM_MOFILE_FN}"
+  aqm_mofile_fn="${AQM_GEFS_FILE_PREFIX}.t${AQM_GEFS_FILE_CYC}z.atmf"
+  if [ $(boolify "${DO_REAL_TIME}") = "TRUE" ]; then
+    aqm_mofile_fp="${COMINgefs}/gefs.${YYYYMMDD}/${AQM_GEFS_FILE_CYC}/chem/sfcsig/${aqm_mofile_fn}"
   else
-    AQM_MOFILE_FP="${DCOMINgefs}/${yyyymmdd}/${AQM_GEFS_FILE_CYC}/${AQM_MOFILE_FN}"
+    aqm_mofile_fp="${COMINgefs}/${YYYYMMDD}/${AQM_GEFS_FILE_CYC}/${aqm_mofile_fn}"
   fi
 
   # Check if GEFS aerosol files exist
   for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do
     hr_mod=$(( hr + EXTRN_MDL_LBCS_OFFSET_HRS ))
     fhr=$( printf "%03d" "${hr_mod}" )
-    AQM_MOFILE_FHR_FP="${AQM_MOFILE_FP}${fhr}.nemsio"
-    if [ ! -e "${AQM_MOFILE_FHR_FP}" ]; then
-      message_txt="The GEFS file (AQM_MOFILE_FHR_FP) for LBCs of \"${cycle}\" does not exist:
-  AQM_MOFILE_FHR_FP = \"${AQM_MOFILE_FHR_FP}\""
-      if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then
-        message_warning="WARNING: ${message_txt}"
-        print_info_msg "${message_warning}"
-        if [ ! -z "${maillist}" ]; then
-          echo "${message_warning}" | mail.py $maillist
-        fi
+    aqm_mofile_fhr_fp="${aqm_mofile_fp}${fhr}.nemsio"
+    if [ ! -e "${aqm_mofile_fhr_fp}" ]; then
+      message_txt="WARNING: The GEFS file (aqm_mofile_fhr_fp) for LBCs of \"${cycle}\" does not exist:
+  aqm_mofile_fhr_fp = \"${aqm_mofile_fhr_fp}\""
+      if [ ! -z "${MAILTO}" ] && [ "${MACHINE}" = "WCOSS2" ]; then
+        echo "${message_txt}" | mail.py ${MAILTO}
       else
         print_err_msg_exit "${message_txt}"
-      fi
+      fi
     fi
   done
 
-  NUMTS="$(( FCST_LEN_HRS / LBC_SPEC_INTVL_HRS + 1 ))"
+  numts="$(( FCST_LEN_HRS / LBC_SPEC_INTVL_HRS + 1 ))"
 
   cat > gefs2lbc-nemsio.ini <>$pgmout 2>errfile
+  export err=$?; err_chk
 
   print_info_msg "
========================================================================
Successfully added GEFS aerosol LBCs !!!
========================================================================" -# fi + +for hr in 0 ${LBC_SPEC_FCST_HRS[@]}; do + fhr=$( printf "%03d" "${hr}" ) + aqm_lbcs_fn="${aqm_lbcs_fn_prefix}${fhr}.nc" + cp -p "${DATA}/${aqm_lbcs_fn}" ${COMOUT} +done # print_info_msg " ======================================================================== diff --git a/scripts/exregional_bias_correction_o3.sh b/scripts/exsrw_bias_correction_o3.sh similarity index 67% rename from scripts/exregional_bias_correction_o3.sh rename to scripts/exsrw_bias_correction_o3.sh index 709cc1957d..343e7e6f2b 100755 --- a/scripts/exregional_bias_correction_o3.sh +++ b/scripts/exsrw_bias_correction_o3.sh @@ -7,8 +7,12 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS_FP} +. ${USHsrw}/source_util_funcs.sh +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_bias_correction_o3 ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -17,7 +21,7 @@ source_config_for_task "cpl_aqm_parm|task_bias_correction_o3" ${GLOBAL_VAR_DEFNS # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -80,8 +84,8 @@ yyyymm_m1=${PDYm1:0:6} yyyy_m2=${PDYm2:0:4} yyyymm_m2=${PDYm2:0:6} yyyy_m3=${PDYm3:0:4} -yyyymm_m3=${PDYm3:0:6} - +yyyymm_m3=${PDYm3:0:6} + # #----------------------------------------------------------------------- # @@ -103,13 +107,11 @@ fi # STEP 1: Retrieve AIRNOW observation data #----------------------------------------------------------------------------- -mkdir_vrfy -p "${DATA}/data" +mkdir -p "${DATA}/data" -# Retrieve real-time airnow data for the last three days and convert them into netcdf. -# In the following for-loop, pdym stands for previous (m) day of the present day (PDY) -# in the NCO standards, i.e. PDYm1: 1day ago, PDYm2: 2days ago, PDYm3: 3days ago -for i_pdym in {1..3}; do - case $i_pdym in +# Retrieve real-time airnow data for the last three days and convert them into netcdf + for ipdym in {1..3}; do + case $ipdym in 1) cvt_yyyy="${yyyy_m1}" cvt_yyyymm="${yyyymm_m1}" @@ -134,22 +136,22 @@ for i_pdym in {1..3}; do cvt_input_fp="${cvt_input_dir}/YYYY/YYYYMMDD/${cvt_input_fn}" cvt_output_fp="${cvt_output_dir}/YYYY/YYYYMMDD/${cvt_output_fn}" - mkdir_vrfy -p "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}" - mkdir_vrfy -p "${cvt_output_dir}/${cvt_yyyy}/${cvt_pdy}" - cp_vrfy ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}" + mkdir -p "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}" + mkdir -p "${cvt_output_dir}/${cvt_yyyy}/${cvt_pdy}" + + if [ "$(ls -A ${DCOMINairnow}/${cvt_pdy}/airnow)" ]; then + cp ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}" + else + message_warning="WARNING: airnow data missing. skip this date ${cvt_pdy}" + print_info_msg "${message_warning}" + fi PREP_STEP eval ${RUN_CMD_SERIAL} ${EXECdir}/convert_airnow_csv ${cvt_input_fp} ${cvt_output_fp} ${cvt_pdy} ${cvt_pdy} ${REDIRECT_OUT_ERR} export err=$? 
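#
# The PDYm1..PDYm3 variables used by this retrieval loop follow the NCO
# convention (1, 2, and 3 days before PDY) and in operations are exported
# by prod_util's setpdy.sh. A self-contained sketch of the same values,
# assuming $DATE_UTIL is GNU date and using a hypothetical PDY:
PDY=20240115
PDYm1=$($DATE_UTIL --utc --date "${PDY} - 1 day" "+%Y%m%d")    # 20240114
PDYm2=$($DATE_UTIL --utc --date "${PDY} - 2 days" "+%Y%m%d")   # 20240113
PDYm3=$($DATE_UTIL --utc --date "${PDY} - 3 days" "+%Y%m%d")   # 20240112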
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_chk - else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to executable to run CONVERT_AIRNOW_CSV returned with nonzero exit code." - fi - fi POST_STEP -done + done #----------------------------------------------------------------------------- # STEP 2: Extracting PM2.5, O3, and met variables from CMAQ input and outputs @@ -158,7 +160,7 @@ done FCST_LEN_HRS=$( printf "%03d" ${FCST_LEN_HRS} ) ic=1 while [ $ic -lt 120 ]; do - if [ -s ${COMIN}/${NET}.${cycle}.chem_sfc.f${FCST_LEN_HRS}.nc ]; then + if [ -s ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.f${FCST_LEN_HRS}.nc ]; then echo "cycle ${cyc} post1 is done!" break else @@ -173,113 +175,88 @@ fi # remove any pre-exit ${NET}.${cycle}.chem_sfc/met_sfc.nc for 2-stage post processing DATA_grid="${DATA}/data/bcdata.${yyyymm}/grid" if [ -d "${DATA_grid}/${cyc}z/${PDY}" ]; then - rm_vrfy -rf "${DATA_grid}/${cyc}z/${PDY}" + rm -rf "${DATA_grid}/${cyc}z/${PDY}" fi -mkdir_vrfy -p "${DATA_grid}/${cyc}z/${PDY}" -ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY} -ln_vrfy -sf ${COMIN}/${NET}.${cycle}.met_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY} +mkdir -p "${DATA_grid}/${cyc}z/${PDY}" +ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY} +ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.met_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY} #----------------------------------------------------------------------------- # STEP 3: Intepolating CMAQ O3 into AIRNow sites #----------------------------------------------------------------------------- -mkdir_vrfy -p ${DATA}/data/coords -mkdir_vrfy -p ${DATA}/data/site-lists.interp -mkdir_vrfy -p ${DATA}/out/ozone/${yyyy} -mkdir_vrfy -p ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy} +mkdir -p ${DATA}/data/coords +mkdir -p ${DATA}/data/site-lists.interp +mkdir -p ${DATA}/out/ozone/${yyyy} +mkdir -p ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy} -cp_vrfy ${PARMaqm_utils}/bias_correction/sites.valid.ozone.20230331.12z.list ${DATA}/data/site-lists.interp -cp_vrfy ${PARMaqm_utils}/bias_correction/aqm.t12z.chem_sfc.f000.nc ${DATA}/data/coords -cp_vrfy ${PARMaqm_utils}/bias_correction/config.interp.ozone.7-vars_${id_domain}.${cyc}z ${DATA} +cp ${PARMdir}/aqm_utils/bias_correction/sites.valid.ozone.20230331.12z.list ${DATA}/data/site-lists.interp +cp ${PARMdir}/aqm_utils/bias_correction/aqm.t12z.chem_sfc.f000.nc ${DATA}/data/coords +cp ${PARMdir}/aqm_utils/bias_correction/config.interp.ozone.7-vars_${id_domain}.${cyc}z ${DATA} PREP_STEP eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_interpolate config.interp.ozone.7-vars_${id_domain}.${cyc}z ${cyc}z ${PDY} ${PDY} ${REDIRECT_OUT_ERR} export err=$? -if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_chk -else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to executable to run AQM_BIAS_INTERPOLATE returned with nonzero exit code." 
- fi -fi POST_STEP -cp_vrfy ${DATA}/out/ozone/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy} - -if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then - mkdir_vrfy -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} - cp_vrfy ${DATA}/out/ozone/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} - - for i_pdym in {0..3}; do - case $i_pdym in - 0) - cvt_yyyy="${yyyy}" - cvt_yyyymm="${yyyymm}" - cvt_pdy="${PDY}" - ;; - 1) - cvt_yyyy="${yyyy_m1}" - cvt_yyyymm="${yyyymm_m1}" - cvt_pdy="${PDYm1}" - ;; - 2) - cvt_yyyy="${yyyy_m2}" - cvt_yyyymm="${yyyymm_m2}" - cvt_pdy="${PDYm2}" - ;; - 3) - cvt_yyyy="${yyyy_m3}" - cvt_yyyymm="${yyyymm_m3}" - cvt_pdy="${PDYm3}" - ;; - esac - # CSV and NetCDF files - mkdir_vrfy -p ${COMOUTbicor}/bcdata.${cvt_yyyymm}/airnow/csv/${cvt_yyyy}/${cvt_pdy} - mkdir_vrfy -p ${COMOUTbicor}/bcdata.${cvt_yyyymm}/airnow/netcdf/${cvt_yyyy}/${cvt_pdy} - if [ "${i_pdym}" != "0" ]; then - cp_vrfy ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat ${COMOUTbicor}/bcdata.${cvt_yyyymm}/airnow/csv/${cvt_yyyy}/${cvt_pdy} - cp_vrfy ${DATA}/data/bcdata.${cvt_yyyymm}/airnow/netcdf/${cvt_yyyy}/${cvt_pdy}/HourlyAQObs.${cvt_pdy}.nc ${COMOUTbicor}/bcdata.${cvt_yyyymm}/airnow/netcdf/${cvt_yyyy}/${cvt_pdy} - fi - done - mkdir_vrfy -p ${COMOUTbicor}/bcdata.${yyyymm}/grid/${cyc}z/${PDY} - cp_vrfy ${COMIN}/${NET}.${cycle}.*sfc*.nc ${COMOUTbicor}/bcdata.${yyyymm}/grid/${cyc}z/${PDY} +cp ${DATA}/out/ozone/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/ozone/${yyyy} + +if [ $(boolify "${DO_AQM_SAVE_AIRNOW_HIST}") = "TRUE" ]; then + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} + cp ${DATA}/out/ozone/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/ozone/${yyyy} + + # CSV files + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/airnow/csv/${yyyy}/${PDY} + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m1}/airnow/csv/${yyyy_m1}/${PDYm1} + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m2}/airnow/csv/${yyyy_m2}/${PDYm2} + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m3}/airnow/csv/${yyyy_m3}/${PDYm3} + cp ${DCOMINairnow}/${PDYm1}/airnow/HourlyAQObs_${PDYm1}*.dat ${COMOUTbicor}/bcdata.${yyyymm_m1}/airnow/csv/${yyyy_m1}/${PDYm1} + cp ${DCOMINairnow}/${PDYm2}/airnow/HourlyAQObs_${PDYm2}*.dat ${COMOUTbicor}/bcdata.${yyyymm_m2}/airnow/csv/${yyyy_m2}/${PDYm2} + cp ${DCOMINairnow}/${PDYm3}/airnow/HourlyAQObs_${PDYm3}*.dat ${COMOUTbicor}/bcdata.${yyyymm_m3}/airnow/csv/${yyyy_m3}/${PDYm3} + + # NetCDF files + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/airnow/netcdf/${yyyy}/${PDY} + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m1}/airnow/netcdf/${yyyy_m1}/${PDYm1} + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m2}/airnow/netcdf/${yyyy_m2}/${PDYm2} + mkdir -p ${COMOUTbicor}/bcdata.${yyyymm_m3}/airnow/netcdf/${yyyy_m3}/${PDYm3} + cp ${DATA}/data/bcdata.${yyyymm_m1}/airnow/netcdf/${yyyy_m1}/${PDYm1}/HourlyAQObs.${PDYm1}.nc ${COMOUTbicor}/bcdata.${yyyymm_m1}/airnow/netcdf/${yyyy_m1}/${PDYm1} + cp ${DATA}/data/bcdata.${yyyymm_m2}/airnow/netcdf/${yyyy_m2}/${PDYm2}/HourlyAQObs.${PDYm2}.nc ${COMOUTbicor}/bcdata.${yyyymm_m2}/airnow/netcdf/${yyyy_m2}/${PDYm2} + cp ${DATA}/data/bcdata.${yyyymm_m3}/airnow/netcdf/${yyyy_m3}/${PDYm3}/HourlyAQObs.${PDYm3}.nc ${COMOUTbicor}/bcdata.${yyyymm_m3}/airnow/netcdf/${yyyy_m3}/${PDYm3} + + mkdir -p "${COMOUTbicor}/bcdata.${yyyymm}/grid/${cyc}z/${PDY}" + cp ${COMIN}/${cyc}/${NET}.${cycle}.*_sfc.f*.nc ${COMOUTbicor}/bcdata.${yyyymm}/grid/${cyc}z/${PDY} fi #----------------------------------------------------------------------------- # 
STEP 4: Performing Bias Correction for Ozone #----------------------------------------------------------------------------- -rm_vrfy -rf ${DATA}/data/bcdata* +rm -rf ${DATA}/data/bcdata* -ln_vrfy -sf ${COMINbicor}/bcdata* "${DATA}/data" +ln -sf ${COMINbicor}/bcdata* "${DATA}/data" -mkdir_vrfy -p ${DATA}/data/sites -cp_vrfy ${PARMaqm_utils}/bias_correction/config.ozone.bias_corr_${id_domain}.${cyc}z ${DATA} +mkdir -p ${DATA}/data/sites +cp ${PARMdir}/aqm_utils/bias_correction/config.ozone.bias_corr_${id_domain}.${cyc}z ${DATA} PREP_STEP eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_correct config.ozone.bias_corr_${id_domain}.${cyc}z ${cyc}z ${BC_STDAY} ${PDY} ${REDIRECT_OUT_ERR} export err=$? -if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_chk -else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to executable to run AQM_BIAS_CORRECT returned with nonzero exit code." - fi -fi POST_STEP -cp_vrfy ${DATA}/out/ozone.corrected* ${COMIN} +cp ${DATA}/out/ozone.corrected* ${COMOUT} if [ "${cyc}" = "12" ]; then - cp_vrfy ${DATA}/data/sites/sites.valid.ozone.${PDY}.${cyc}z.list ${DATA} + cp ${DATA}/data/sites/sites.valid.ozone.${PDY}.${cyc}z.list ${DATA} fi #----------------------------------------------------------------------------- # STEP 5: converting netcdf to grib format #----------------------------------------------------------------------------- -ln_vrfy -sf ${COMIN}/ozone.corrected.${PDY}.${cyc}z.nc . +ln -sf ${COMIN}/${cyc}/ozone.corrected.${PDY}.${cyc}z.nc . # cat >bias_cor.ini < filesize export XLFRTEOPTS="unit_vars=yes" @@ -408,11 +373,11 @@ EOF1 export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.227 done # Post Files to COMOUTwmo - cp_vrfy awpaqm.${cycle}.*o3-max-bc.227.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.*o3-max-bc.227.grib2 ${COMOUTwmo} # Distribute Data if [ "${SENDDBN_NTC}" = "TRUE" ] ; then @@ -423,13 +388,13 @@ EOF1 fi #------------------------------------- -rm_vrfy -rf tmpfile +rm -rf tmpfile fhr=01 while [ "${fhr}" -le "${FCST_LEN_HRS}" ]; do fhr3d=$( printf "%03d" "${fhr}" ) - cp_vrfy ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr3d}.${id_domain}.grib2 ${COMOUT} + cp ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr3d}.${id_domain}.grib2 ${COMOUT} # create GRIB file to convert to grid 227 then to GRIB2 for NDFD cat ${DATA}/${NET}.${cycle}.awpozcon_bc.f${fhr3d}.${id_domain}.grib2 >> tmpfile @@ -453,13 +418,13 @@ newgrib2file2=${NET}.${cycle}.ave_8hr_o3_bc.227.grib2 grid227="lambert:265.0000:25.0000:25.0000 226.5410:1473:5079.000 12.1900:1025:5079.000" wgrib2 tmpfile.1hr -set_grib_type c3b -new_grid_winds earth -new_grid ${grid227} ${newgrib2file1} -cp_vrfy tmpfile.1hr ${COMOUT}/${NET}.${cycle}.ave_1hr_o3_bc.${id_domain}.grib2 -cp_vrfy ${NET}.${cycle}.ave_1hr_o3_bc.227.grib2 ${COMOUT} +cp tmpfile.1hr ${COMOUT}/${NET}.${cycle}.ave_1hr_o3_bc.${id_domain}.grib2 +cp ${NET}.${cycle}.ave_1hr_o3_bc.227.grib2 ${COMOUT} if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then wgrib2 tmpfile.8hr -set_grib_type c3b -new_grid_winds earth -new_grid ${grid227} ${newgrib2file2} - cp_vrfy tmpfile.8hr ${COMOUT}/${NET}.${cycle}.ave_8hr_o3_bc.${id_domain}.grib2 - cp_vrfy ${NET}.${cycle}.ave_8hr_o3_bc.227.grib2 ${COMOUT} + cp tmpfile.8hr ${COMOUT}/${NET}.${cycle}.ave_8hr_o3_bc.${id_domain}.grib2 + cp ${NET}.${cycle}.ave_8hr_o3_bc.227.grib2 ${COMOUT} fi if [ "${SENDDBN}" = "TRUE" ] ; then 
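#
# The grid-227 AWIPS products in the hunk above come from wgrib2's
# -new_grid option, whose argument is the three-part "projection x-spec
# y-spec" string held in ${grid227}. The same call in isolation, annotated
# (output filename hypothetical):
grid227="lambert:265.0000:25.0000:25.0000 226.5410:1473:5079.000 12.1900:1025:5079.000"
# -set_grib_type c3b    : output packing type, value copied from the script
# -new_grid_winds earth : write winds earth-relative on the target grid
wgrib2 tmpfile.1hr -set_grib_type c3b -new_grid_winds earth \
  -new_grid ${grid227} aqm.t12z.ave_1hr_o3_bc.227.grib2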
@@ -482,7 +447,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=grib2.${cycle}.awpcsozcon_aqm_${hr}-bc.temp - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_${hr}hr_o3_bc-awpozcon.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_${hr}hr_o3_bc-awpozcon.${cycle}.227 echo `ls -l grib2.${cycle}.awpcsozcon_aqm_${hr}-bc.temp | awk '{print $5} '` > filesize export XLFRTEOPTS="unit_vars=yes" @@ -490,7 +455,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.${hr}ho3-bc.227.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_${hr}hr_o3_bc-awpozcon.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_${hr}hr_o3_bc-awpozcon.${cycle}.227 # Create AWIPS GRIB data for dailly 1-hr and 8hr max ozone echo 0 > filesize @@ -499,7 +464,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=${NET}.${cycle}.max_${hr}hr_o3-bc.227.grib2.temp - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3_bc-maxi.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm-${hr}hro3_bc-maxi.${cycle}.227 echo `ls -l ${NET}.${cycle}.max_${hr}hr_o3-bc.227.grib2.temp | awk '{print $5} '` > filesize export XLFRTEOPTS="unit_vars=yes" @@ -507,11 +472,11 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3_bc-maxi.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm-${hr}hro3_bc-maxi.${cycle}.227 # Post Files to COMOUTwmo - cp_vrfy awpaqm.${cycle}.${hr}ho3-bc.227.grib2 ${COMOUTwmo} - cp_vrfy awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.${hr}ho3-bc.227.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.${hr}ho3-max-bc.227.grib2 ${COMOUTwmo} # Distribute Data if [ "${SENDDBN}" = "TRUE" ]; then @@ -520,7 +485,6 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then fi done fi - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_bias_correction_pm25.sh b/scripts/exsrw_bias_correction_pm25.sh similarity index 67% rename from scripts/exregional_bias_correction_pm25.sh rename to scripts/exsrw_bias_correction_pm25.sh index 9503f744c9..70cf512589 100755 --- a/scripts/exregional_bias_correction_pm25.sh +++ b/scripts/exsrw_bias_correction_pm25.sh @@ -7,8 +7,12 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_bias_correction_pm25" ${GLOBAL_VAR_DEFNS_FP} +. ${USHsrw}/source_util_funcs.sh +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_bias_correction_pm25 ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -17,7 +21,7 @@ source_config_for_task "cpl_aqm_parm|task_bias_correction_pm25" ${GLOBAL_VAR_DEF # #----------------------------------------------------------------------- # -{ save_shell_opts; . 
$USHdir/preamble.sh; } > /dev/null 2>&1
+{ save_shell_opts; set -xue; } > /dev/null 2>&1
 #
 #-----------------------------------------------------------------------
 #
@@ -103,13 +107,11 @@ fi
 # STEP 1: Retrieve AIRNOW observation data
 #-----------------------------------------------------------------------------
 
-mkdir_vrfy -p "${DATA}/data"
+mkdir -p "${DATA}/data"
 
-# Retrieve real-time airnow data for the last three days.
-# In the following for-loop, pdym stands for previous (m) day of the present day (PDY)
-# in the NCO standards, i.e. PDYm1: 1day ago, PDYm2: 2days ago, PDYm3: 3days ago
-for i_pdym in {1..3}; do
-  case $i_pdym in
+# Retrieve real-time airnow data for the last three days
+ for ipdym in {1..3}; do
+  case $ipdym in
     1)
       cvt_yyyy="${yyyy_m1}"
      cvt_yyyymm="${yyyymm_m1}"
@@ -134,22 +136,21 @@ for i_pdym in {1..3}; do
   cvt_input_fp="${cvt_input_dir}/YYYY/YYYYMMDD/${cvt_input_fn}"
   cvt_output_fp="${cvt_output_dir}/YYYY/YYYYMMDD/${cvt_output_fn}"
 
-  mkdir_vrfy -p "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}"
-  mkdir_vrfy -p "${cvt_output_dir}/${cvt_yyyy}/${cvt_pdy}"
-  cp_vrfy ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}"
-
+  mkdir -p "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}"
+  mkdir -p "${cvt_output_dir}/${cvt_yyyy}/${cvt_pdy}"
+  if [ "$(ls -A ${DCOMINairnow}/${cvt_pdy}/airnow)" ]; then
+    cp ${DCOMINairnow}/${cvt_pdy}/airnow/HourlyAQObs_${cvt_pdy}*.dat "${cvt_input_dir}/${cvt_yyyy}/${cvt_pdy}"
+  else
+    message_warning="WARNING: airnow data missing. skip this date ${cvt_pdy}"
+    print_info_msg "${message_warning}"
+  fi
+
   PREP_STEP
   eval ${RUN_CMD_SERIAL} ${EXECdir}/convert_airnow_csv ${cvt_input_fp} ${cvt_output_fp} ${cvt_pdy} ${cvt_pdy} ${REDIRECT_OUT_ERR}
   export err=$?
break else @@ -173,82 +174,70 @@ fi # remove any pre-exit ${NET}.${cycle}.chem_sfc/met_sfc.nc for 2-stage post processing DATA_grid="${DATA}/data/bcdata.${yyyymm}/grid" if [ -d "${DATA_grid}/${cyc}z/${PDY}" ]; then - rm_vrfy -rf "${DATA_grid}/${cyc}z/${PDY}" + rm -rf "${DATA_grid}/${cyc}z/${PDY}" fi -mkdir_vrfy -p "${DATA_grid}/${cyc}z/${PDY}" -ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY} -ln_vrfy -sf ${COMIN}/${NET}.${cycle}.met_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY} +mkdir -p "${DATA_grid}/${cyc}z/${PDY}" +ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY} +ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.met_sfc.*.nc ${DATA_grid}/${cyc}z/${PDY} #----------------------------------------------------------------------- # STEP 3: Intepolating CMAQ PM2.5 into AIRNow sites #----------------------------------------------------------------------- -mkdir_vrfy -p ${DATA}/data/coords -mkdir_vrfy -p ${DATA}/data/site-lists.interp -mkdir_vrfy -p ${DATA}/out/pm25/${yyyy} -mkdir_vrfy -p ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy} +mkdir -p ${DATA}/data/coords +mkdir -p ${DATA}/data/site-lists.interp +mkdir -p ${DATA}/out/pm25/${yyyy} +mkdir -p ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy} -cp_vrfy ${PARMaqm_utils}/bias_correction/sites.valid.pm25.20230331.12z.list ${DATA}/data/site-lists.interp -cp_vrfy ${PARMaqm_utils}/bias_correction/aqm.t12z.chem_sfc.f000.nc ${DATA}/data/coords -cp_vrfy ${PARMaqm_utils}/bias_correction/config.interp.pm2.5.5-vars_${id_domain}.${cyc}z ${DATA} +cp ${PARMdir}/aqm_utils/bias_correction/sites.valid.pm25.20230331.12z.list ${DATA}/data/site-lists.interp +cp ${PARMdir}/aqm_utils/bias_correction/aqm.t12z.chem_sfc.f000.nc ${DATA}/data/coords +cp ${PARMdir}/aqm_utils/bias_correction/config.interp.pm2.5.5-vars_${id_domain}.${cyc}z ${DATA} PREP_STEP eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_interpolate config.interp.pm2.5.5-vars_${id_domain}.${cyc}z ${cyc}z ${PDY} ${PDY} ${REDIRECT_OUT_ERR} export err=$? -if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_chk -else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to executable to run CONVERT_AIRNOW_CSV returned with nonzero exit code." 
- fi -fi POST_STEP -cp_vrfy ${DATA}/out/pm25/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy} +cp ${DATA}/out/pm25/${yyyy}/*nc ${DATA}/data/bcdata.${yyyymm}/interpolated/pm25/${yyyy} -if [ "${DO_AQM_SAVE_AIRNOW_HIST}" = "TRUE" ]; then - mkdir_vrfy -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} - cp_vrfy ${DATA}/out/pm25/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} +if [ $(boolify "${DO_AQM_SAVE_AIRNOW_HIST}") = "TRUE" ]; then +mkdir -p ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} +cp ${DATA}/out/pm25/${yyyy}/*nc ${COMOUTbicor}/bcdata.${yyyymm}/interpolated/pm25/${yyyy} fi #----------------------------------------------------------------------- # STEP 4: Performing Bias Correction for PM2.5 #----------------------------------------------------------------------- -rm_vrfy -rf ${DATA}/data/bcdata* +rm -rf ${DATA}/data/bcdata* -ln_vrfy -sf ${COMINbicor}/bcdata* "${DATA}/data" +ln -sf ${COMINbicor}/bcdata* "${DATA}/data" -mkdir_vrfy -p ${DATA}/data/sites +mkdir -p ${DATA}/data/sites -cp_vrfy ${PARMaqm_utils}/bias_correction/config.pm2.5.bias_corr_${id_domain}.${cyc}z ${DATA} -cp_vrfy ${PARMaqm_utils}/bias_correction/site_blocking.pm2.5.2021.0427.2-sites.txt ${DATA} -cp_vrfy ${PARMaqm_utils}/bias_correction/bias_thresholds.pm2.5.2015.1030.32-sites.txt ${DATA} +cp ${PARMdir}/aqm_utils/bias_correction/config.pm2.5.bias_corr_${id_domain}.${cyc}z ${DATA} +cp ${PARMdir}/aqm_utils/bias_correction/site_blocking.pm2.5.2021.0427.2-sites.txt ${DATA} +cp ${PARMdir}/aqm_utils/bias_correction/bias_thresholds.pm2.5.2015.1030.32-sites.txt ${DATA} PREP_STEP eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_bias_correct config.pm2.5.bias_corr_${id_domain}.${cyc}z ${cyc}z ${BC_STDAY} ${PDY} ${REDIRECT_OUT_ERR} export err=$? -if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_chk -else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to executable to run AQM_BIAS_CORRECT returned with nonzero exit code." - fi -fi POST_STEP -cp_vrfy $DATA/out/pm2.5.corrected* ${COMIN} +cp $DATA/out/pm2.5.corrected* ${COMOUT} if [ "${cyc}" = "12" ]; then - cp_vrfy ${DATA}/data/sites/sites.valid.pm25.${PDY}.${cyc}z.list ${DATA} + cp ${DATA}/data/sites/sites.valid.pm25.${PDY}.${cyc}z.list ${DATA} fi #------------------------------------------------------------------------ # STEP 5: converting netcdf to grib format #------------------------------------------------------------------------ -ln_vrfy -sf ${COMIN}/pm2.5.corrected.${PDY}.${cyc}z.nc . +ln -sf ${COMIN}/${cyc}/pm2.5.corrected.${PDY}.${cyc}z.nc . 
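#
# The AIRNow staging in STEP 1 of these scripts tolerates missing days:
# "ls -A" expands to an empty string for an empty or absent directory, so
# the test fails and the loop logs a warning instead of letting the cp
# fail under "set -e". The guard in isolation (path hypothetical):
src_dir=${DCOMINairnow}/20240114/airnow
if [ "$(ls -A ${src_dir} 2>/dev/null)" ]; then
  cp ${src_dir}/HourlyAQObs_*.dat .
else
  print_info_msg "WARNING: airnow data missing. skip this date"
fi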
# convert from netcdf to grib2 format cat >bias_cor.ini < filesize export XLFRTEOPTS="unit_vars=yes" @@ -412,17 +386,17 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.1hpm25-bc.227.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_pm25_bc.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_pm25_bc.${cycle}.227 #################################################### - rm_vrfy -f filesize + rm -f filesize echo 0 > filesize export XLFRTEOPTS="unit_vars=yes" export FORT11=${NET}.${cycle}.max_1hr_pm25_bc.227.grib2 export FORT12="filesize" export FORT31= export FORT51=${NET}.${cycle}.max_1hr_pm25_bc.227.grib2.temp - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_max_1hr_pm25_bc.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_max_1hr_pm25_bc.${cycle}.227 echo `ls -l ${NET}.${cycle}.max_1hr_pm25_bc.227.grib2.temp | awk '{print $5} '` > filesize export XLFRTEOPTS="unit_vars=yes" @@ -430,9 +404,9 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_max_1hr_pm25_bc.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_max_1hr_pm25_bc.${cycle}.227 - rm_vrfy -f filesize + rm -f filesize # daily_24hr_ave_PM2.5 echo 0 > filesize export XLFRTEOPTS="unit_vars=yes" @@ -440,7 +414,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=${NET}.${cycle}.ave_24hr_pm25_bc.227.grib2.temp - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_24hrpm25_bc_awp.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_24hrpm25_bc_awp.${cycle}.227 echo `ls -l ${NET}.${cycle}.ave_24hr_pm25_bc.227.grib2.temp | awk '{print $5} '` > filesize export XLFRTEOPTS="unit_vars=yes" @@ -448,21 +422,20 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_24hrpm25_bc_awp.${cycle}.227 + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_24hrpm25_bc_awp.${cycle}.227 # Post Files to COMOUTwmo - cp_vrfy awpaqm.${cycle}.1hpm25-bc.227.grib2 ${COMOUTwmo} - cp_vrfy awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 ${COMOUTwmo} - cp_vrfy awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.1hpm25-bc.227.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 ${COMOUTwmo} # Distribute Data if [ "${SENDDBN_NTC}" = "TRUE" ] ; then - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.1hpm25-bc.227.grib2 - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUT}/awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1hpm25-bc.227.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max-bc.227.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.24hr-pm25-ave-bc.227.grib2 fi fi - # #----------------------------------------------------------------------- # diff --git a/scripts/exsrw_fire_emission.sh b/scripts/exsrw_fire_emission.sh new file 
mode 100755 index 0000000000..3ae78422f5 --- /dev/null +++ b/scripts/exsrw_fire_emission.sh @@ -0,0 +1,170 @@ +#!/usr/bin/env bash + +# +#----------------------------------------------------------------------- +# +# Source the variable definitions file and the bash utility functions. +# +#----------------------------------------------------------------------- +# +. ${USHsrw}/source_util_funcs.sh +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# +{ save_shell_opts; set -xue; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " +======================================================================== +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + +This is the ex-script for the task that fetches fire emission +data files from disk or generates model-ready RAVE emission file from raw +data files. 
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Set up date variables for the fire emission file search
+#
+#-----------------------------------------------------------------------
+#
+YYYYMMDD=${FIRE_FILE_CDATE:0:8}
+HH=${FIRE_FILE_CDATE:8:2}
+
+CDATE_mh1=`$NDATE -1 ${YYYYMMDD}${HH}`
+yyyymmdd_mh1=${CDATE_mh1:0:8}
+hh_mh1=${CDATE_mh1:8:2}
+#
+#-----------------------------------------------------------------------
+#
+# Retrieve the fire emission file
+#
+#-----------------------------------------------------------------------
+#
+aqm_fire_file_fn="${AQM_FIRE_FILE_PREFIX}_${YYYYMMDD}_t${HH}z${AQM_FIRE_FILE_SUFFIX}"
+
+# Check if the fire file exists in the designated directory
+if [ -e "${COMINfire}/${aqm_fire_file_fn}" ]; then
+  cp -p "${COMINfire}/${aqm_fire_file_fn}" ${COMOUT}
+else
+  # Copy raw data
+  for ihr in {0..23}; do
+    download_time=`$NDATE -$ihr ${yyyymmdd_mh1}${hh_mh1}`
+    FILE_curr="Hourly_Emissions_13km_${download_time}00_${download_time}00.nc"
+    FILE_13km="RAVE-HrlyEmiss-13km_v*_blend_s${download_time}00000_e${download_time}59590_c*.nc"
+    yyyymmdd_dn="${download_time:0:8}"
+    hh_dn="${download_time:8:2}"
+    missing_download_time=`$NDATE -24 ${yyyymmdd_dn}${hh_dn}`
+    yyyymmdd_dn_md1="${missing_download_time:0:8}"
+    FILE_13km_md1="RAVE-HrlyEmiss-13km_v*_blend_s${missing_download_time}00000_e${missing_download_time}59590_c*.nc"
+    if [ -s `ls ${COMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}` ] && [ $(stat -c %s `ls ${COMINfire}/${yyyymmdd_dn}/rave/${FILE_13km}`) -gt 4000000 ]; then
+      cp -p ${COMINfire}/${yyyymmdd_dn}/rave/${FILE_13km} ${FILE_curr}
+    elif [ -s `ls ${COMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1}` ] && [ $(stat -c %s `ls ${COMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1}`) -gt 4000000 ]; then
+      echo "WARNING: ${FILE_13km} does not exist or is broken. Replacing with the file from the previous day ..."
+      cp -p ${COMINfire}/${yyyymmdd_dn_md1}/rave/${FILE_13km_md1} ${FILE_curr}
+    else
+      message_txt="WARNING: Fire Emission RAW data does not exist or is broken:
+  FILE_13km_md1 = \"${FILE_13km_md1}\"
+  COMINfire = \"${COMINfire}\""
+
+      cp -p ${FIXaqm}/fire/Hourly_Emissions_13km_dummy.nc ${FILE_curr}
+      print_info_msg "${message_txt}. Replacing with the dummy file :: AQM RUN SOFT FAILED."
+    fi
+  done
+
+  ncks -O -h --mk_rec_dmn time Hourly_Emissions_13km_${download_time}00_${download_time}00.nc temp.nc
+  export err=$?
+  if [ $err -ne 0 ]; then
+    message_txt="Call to NCKS returned with nonzero exit code."
+    err_exit "${message_txt}"
+    print_err_msg_exit "${message_txt}"
+  fi
+
+  mv temp.nc Hourly_Emissions_13km_${download_time}00_${download_time}00.nc
+
+  ncrcat -h Hourly_Emissions_13km_*.nc Hourly_Emissions_13km_${YYYYMMDD}0000_${YYYYMMDD}2300.t${HH}z.nc
+  export err=$?
+  if [ $err -ne 0 ]; then
+    message_txt="Call to NCRCAT returned with nonzero exit code."
+    err_exit "${message_txt}"
+    print_err_msg_exit "${message_txt}"
+  fi
+
+  input_fire="${DATA}/Hourly_Emissions_13km_${YYYYMMDD}0000_${YYYYMMDD}2300.t${HH}z.nc"
+  output_fire="${DATA}/Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_new24.t${HH}z.nc"
+
+  ${USHsrw}/aqm_utils_python/RAVE_remake.allspecies.aqmna13km.g793.py --date "${YYYYMMDD}" --cyc "${HH}" --input_fire "${input_fire}" --output_fire "${output_fire}"
+  export err=$?
+  if [ $err -ne 0 ]; then
+    message_txt="Call to python script \"RAVE_remake.allspecies.py\" returned with nonzero exit code."
+ err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" + fi + + ncks --mk_rec_dmn Time Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_new24.t${HH}z.nc -o Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to NCKS returned with nonzero exit code." + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" + fi + + cp -p Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_1.nc + cp -p Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_2.nc + + ncrcat -O -D 2 Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_1.nc Hourly_Emissions_regrid_NA_13km_${YYYYMMDD}_t${HH}z_h24_2.nc ${aqm_fire_file_fn} + export err=$? + if [ $err -ne 0 ]; then + message_txt="Call to NCRCAT returned with nonzero exit code." + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" + fi + + mv ${aqm_fire_file_fn} temp.nc + ncrename -v PM2.5,PM25 temp.nc temp1.nc + ncap2 -s 'where(Latitude > 30 && Latitude <=49 && land_cover == 1 ) PM25 = PM25 * 0.44444' temp1.nc temp2.nc + ncap2 -s 'where(Latitude <=30 && land_cover == 1 ) PM25 = PM25 * 0.8' temp2.nc temp3.nc + ncap2 -s 'where(Latitude <=49 && land_cover == 3 ) PM25 = PM25 * 1.11111' temp3.nc temp4.nc + ncap2 -s 'where(Latitude <=49 && land_cover == 4 ) PM25 = PM25 * 1.11111' temp4.nc temp5.nc + ncrename -v PM25,PM2.5 temp5.nc temp6.nc + mv temp6.nc ${aqm_fire_file_fn} + + # Copy the final fire emission file to data share directory + cp -p "${DATA}/${aqm_fire_file_fn}" ${COMOUT} +fi +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/function. +# +#----------------------------------------------------------------------- +# +{ restore_shell_opts; } > /dev/null 2>&1 diff --git a/scripts/exregional_nexus_emission.sh b/scripts/exsrw_nexus_emission.sh similarity index 62% rename from scripts/exregional_nexus_emission.sh rename to scripts/exsrw_nexus_emission.sh index d1153d95b7..0fa8c48754 100755 --- a/scripts/exregional_nexus_emission.sh +++ b/scripts/exsrw_nexus_emission.sh @@ -7,8 +7,12 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP} +. ${USHsrw}/source_util_funcs.sh +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_nexus_emission ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -17,7 +21,7 @@ source_config_for_task "cpl_aqm_parm|task_nexus_emission" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -42,7 +46,7 @@ print_info_msg " Entering script: \"${scrfunc_fn}\" In directory: \"${scrfunc_dir}\" -This is the ex-script for the task that runs NEXUS. +This is the ex-script for the task that runs NEXUS EMISSION. 
========================================================================" # #----------------------------------------------------------------------- @@ -75,13 +79,12 @@ fi # #----------------------------------------------------------------------- # -# Move to the NEXUS working directory +# Create NEXUS input directory in working directory # #----------------------------------------------------------------------- # DATAinput="${DATA}/input" -mkdir_vrfy -p "$DATAinput" - +mkdir -p "$DATAinput" # #----------------------------------------------------------------------- # @@ -90,19 +93,13 @@ mkdir_vrfy -p "$DATAinput" #----------------------------------------------------------------------- # USE_GFS_SFC="FALSE" -if [ "${RUN_ENVIR}" = "nco" ]; then - GFS_SFC_INPUT="${DATAROOT}/nexus_gfs_sfc.${share_pid}" -else - GFS_SFC_INPUT="${COMIN}/GFS_SFC" -fi - +GFS_SFC_INPUT="${DATA_SHARE}" if [ -d "${GFS_SFC_INPUT}" ]; then - if [ "$(ls -A ${GFS_SFC_INPUT})" ]; then + if [ "$(ls -A ${GFS_SFC_INPUT}/gfs*.nc)" ]; then ln -sf "${GFS_SFC_INPUT}" "GFS_SFC" USE_GFS_SFC="TRUE" fi fi - # #----------------------------------------------------------------------- # @@ -110,14 +107,12 @@ fi # #----------------------------------------------------------------------- # -cp_vrfy ${EXECdir}/nexus ${DATA} -cp_vrfy ${NEXUS_FIX_DIR}/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc - +cp -p ${FIXaqm}/nexus/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc if [ "${USE_GFS_SFC}" = "TRUE" ]; then - cp_vrfy ${ARL_NEXUS_DIR}/config/cmaq_gfs_megan/*.rc ${DATA} + cp -p ${PARMsrw}/nexus_config/cmaq_gfs_megan/*.rc ${DATA} else - cp_vrfy ${ARL_NEXUS_DIR}/config/cmaq/*.rc ${DATA} + cp -p ${PARMsrw}/nexus_config/cmaq/*.rc ${DATA} fi # #----------------------------------------------------------------------- @@ -127,10 +122,10 @@ fi # #----------------------------------------------------------------------- # -mm="${PDY:4:2}" -dd="${PDY:6:2}" -hh="${cyc}" -yyyymmdd="${PDY}" +MM="${PDY:4:2}" +DD="${PDY:6:2}" +HH="${cyc}" +YYYYMMDD="${PDY}" NUM_SPLIT_NEXUS=$( printf "%02d" ${NUM_SPLIT_NEXUS} ) @@ -141,28 +136,33 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then fi if [ "${NUM_SPLIT_NEXUS}" = "01" ]; then - start_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC" "+%Y%m%d%H" ) - end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${FCST_LEN_HRS} hours" "+%Y%m%d%H" ) + start_date="${YYYYMMDD}${HH}" + end_date=`$NDATE +${FCST_LEN_HRS} ${YYYYMMDD}${HH}` else len_per_split=$(( FCST_LEN_HRS / NUM_SPLIT_NEXUS )) nsptp=$(( nspt+1 )) # Compute start and end dates for nexus split option start_del_hr=$(( len_per_split * nspt )) - start_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${start_del_hr} hours " "+%Y%m%d%H" ) + start_date=`$NDATE +${start_del_hr} ${YYYYMMDD}${HH}` if [ "${nsptp}" = "${NUM_SPLIT_NEXUS}" ];then - end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + $(expr $FCST_LEN_HRS + 1) hours" "+%Y%m%d%H" ) + end_date=`$NDATE +$(expr $FCST_LEN_HRS + 1) ${YYYYMMDD}${HH}` else end_del_hr=$(( len_per_split * nsptp )) - end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + $(expr $end_del_hr + 1) hours" "+%Y%m%d%H" ) + end_del_hr1=$(( $end_del_hr + 1 )) + end_date=`$NDATE +${end_del_hr1} ${YYYYMMDD}${HH}` fi fi # -####################################################################### +#---------------------------------------------------------------------- +# # This will be the section to set the datasets used in $workdir/NEXUS_Config.rc # All Datasets in that file need to be placed here as it will link the files # necessary to that 
folder. In the future this will be done by a get_nexus_input # script +# +#---------------------------------------------------------------------- +# NEI2016="TRUE" TIMEZONES="TRUE" CEDS="TRUE" @@ -173,148 +173,138 @@ NOAAGMD="TRUE" SOA="TRUE" EDGAR="TRUE" MEGAN="TRUE" -MODIS_XLAI="TRUE" +MODIS_XLAI="FALSE" OLSON_MAP="TRUE" Yuan_XLAI="TRUE" GEOS="TRUE" AnnualScalar="TRUE" - -NEXUS_INPUT_BASE_DIR=${NEXUS_INPUT_DIR} -######################################################################## - +OFFLINE_SOILNOX="TRUE" # #---------------------------------------------------------------------- # # modify time configuration file # -python3 ${ARL_NEXUS_DIR}/utils/python/nexus_time_parser.py -f ${DATA}/HEMCO_sa_Time.rc -s $start_date -e $end_date +#---------------------------------------------------------------------- +# +${USHsrw}/nexus_utils/python/nexus_time_parser.py -f ${DATA}/HEMCO_sa_Time.rc -s $start_date -e $end_date export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"nexus_time_parser.py\" failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi - # #--------------------------------------------------------------------- # # set the root directory to the temporary directory # -python3 ${ARL_NEXUS_DIR}/utils/python/nexus_root_parser.py -f ${DATA}/NEXUS_Config.rc -d ${DATAinput} +#---------------------------------------------------------------------- +# +${USHsrw}/nexus_utils/python/nexus_root_parser.py -f ${DATA}/NEXUS_Config.rc -d ${DATAinput} export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"nexus_root_parser.py\" failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi - # #---------------------------------------------------------------------- +# # Get all the files needed (TEMPORARILY JUST COPY FROM THE DIRECTORY) # +#---------------------------------------------------------------------- +# if [ "${NEI2016}" = "TRUE" ]; then #NEI2016 - mkdir_vrfy -p ${DATAinput}/NEI2016v1 - mkdir_vrfy -p ${DATAinput}/NEI2016v1/v2022-07 - mkdir_vrfy -p ${DATAinput}/NEI2016v1/v2022-07/${mm} - python3 ${ARL_NEXUS_DIR}/utils/python/nexus_nei2016_linker.py --src_dir ${NEXUS_INPUT_BASE_DIR} --date ${yyyymmdd} --work_dir ${DATAinput} -v "v2022-07" + mkdir -p ${DATAinput}/NEI2016v1 + mkdir -p ${DATAinput}/NEI2016v1/v2022-07 + mkdir -p ${DATAinput}/NEI2016v1/v2022-07/${MM} + ${USHsrw}/nexus_utils/python/nexus_nei2016_linker.py --src_dir ${FIXemis} --date ${YYYYMMDD} --work_dir ${DATAinput} -v "v2022-07" export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"nexus_nei2016_linker.py\" failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi - python3 ${ARL_NEXUS_DIR}/utils/python/nexus_nei2016_control_tilefix.py -f ${DATA}/NEXUS_Config.rc -t ${DATA}/HEMCO_sa_Time.rc # -d ${yyyymmdd} + ${USHsrw}/nexus_utils/python/nexus_nei2016_control_tilefix.py -f ${DATA}/NEXUS_Config.rc -t ${DATA}/HEMCO_sa_Time.rc # -d ${yyyymmdd} export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"nexus_nei2016_control_tilefix.py\" failed." 
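#
# Every dataset block in the section below reduces to the same shape: a
# TRUE/FALSE flag set earlier in the script gates one "ln -sf" of a
# ${FIXemis} subdirectory into ${DATAinput}. A table-driven sketch of that
# pattern, covering only the datasets whose flag name matches the directory
# name (HTAP, OMI-HTAP, EDGAR, GEOS, and NOAA GMD use different spellings,
# so they are omitted here):
for ds in TIMEZONES MASKS CEDS SOA MEGAN OLSON_MAP Yuan_XLAI; do
  flag="${!ds:-FALSE}"                      # indirect lookup, e.g. ${MASKS}
  if [ "$(boolify ${flag})" = "TRUE" ]; then
    ln -sf ${FIXemis}/${ds} ${DATAinput}
  fi
done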
- if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi fi if [ "${TIMEZONES}" = "TRUE" ]; then # TIME ZONES - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/TIMEZONES ${DATAinput}/ + ln -sf ${FIXemis}/TIMEZONES ${DATAinput} fi if [ "${MASKS}" = "TRUE" ]; then # MASKS - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/MASKS ${DATAinput}/ + ln -sf ${FIXemis}/MASKS ${DATAinput} fi if [ "${CEDS}" = "TRUE" ]; then #CEDS - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/CEDS ${DATAinput}/ + ln -sf ${FIXemis}/CEDS ${DATAinput} fi if [ "${HTAP2010}" = "TRUE" ]; then #CEDS2014 - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/HTAP ${DATAinput}/ + ln -sf ${FIXemis}/HTAP ${DATAinput} fi if [ "${OMIHTAP}" = "TRUE" ]; then #CEDS2014 - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/OMI-HTAP_2019 ${DATAinput}/ + ln -sf ${FIXemis}/OMI-HTAP_2019 ${DATAinput} fi if [ "${NOAAGMD}" = "TRUE" ]; then #NOAA_GMD - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/NOAA_GMD ${DATAinput}/ + ln -sf ${FIXemis}/NOAA_GMD ${DATAinput} fi if [ "${SOA}" = "TRUE" ]; then #SOA - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/SOA ${DATAinput}/ + ln -sf ${FIXemis}/SOA ${DATAinput} fi if [ "${EDGAR}" = "TRUE" ]; then #EDGARv42 - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/EDGARv42 ${DATAinput}/ + ln -sf ${FIXemis}/EDGARv42 ${DATAinput} fi if [ "${MEGAN}" = "TRUE" ]; then #MEGAN - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/MEGAN ${DATAinput}/ + ln -sf ${FIXemis}/MEGAN ${DATAinput} fi if [ "${OLSON_MAP}" = "TRUE" ]; then #OLSON_MAP - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/OLSON_MAP ${DATAinput}/ + ln -sf ${FIXemis}/OLSON_MAP ${DATAinput} fi if [ "${Yuan_XLAI}" = "TRUE" ]; then #Yuan_XLAI - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/Yuan_XLAI ${DATAinput}/ + ln -sf ${FIXemis}/Yuan_XLAI ${DATAinput} fi if [ "${GEOS}" = "TRUE" ]; then #GEOS - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/GEOS_0.5x0.625 ${DATAinput}/ + ln -sf ${FIXemis}/GEOS_0.5x0.625 ${DATAinput} fi if [ "${AnnualScalar}" = "TRUE" ]; then #ANNUAL_SCALAR - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/AnnualScalar ${DATAinput}/ + ln -sf ${FIXemis}/AnnualScalar ${DATAinput} fi if [ "${MODIS_XLAI}" = "TRUE" ]; then #MODIS_XLAI - ln_vrfy -sf ${NEXUS_INPUT_BASE_DIR}/MODIS_XLAI ${DATAinput}/ + ln -sf ${FIXemis}/MODIS_XLAI ${DATAinput} +fi + +if [ "${OFFLINE_SOILNOX}" = "TRUE" ]; then #OFFLINE_SOILNOX + ln -sf ${FIXemis}/OFFLINE_SOILNOX ${DATAinput} fi if [ "${USE_GFS_SFC}" = "TRUE" ]; then # GFS INPUT - mkdir_vrfy -p ${DATAinput}/GFS_SFC - python3 ${ARL_NEXUS_DIR}/utils/python/nexus_gfs_bio.py -i ${DATA}/GFS_SFC/gfs.t??z.sfcf???.nc -o ${DATA}/GFS_SFC_MEGAN_INPUT.nc + mkdir -p ${DATAinput}/GFS_SFC + ${USHsrw}/nexus_utils/python/nexus_gfs_bio.py -i ${DATA}/GFS_SFC/gfs.t??z.sfcf???.nc -o ${DATA}/GFS_SFC_MEGAN_INPUT.nc export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"nexus_gfs_bio.py\" failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi fi - # #---------------------------------------------------------------------- # @@ -322,18 +312,14 @@ fi # #----------------------------------------------------------------------- # -PREP_STEP -eval ${RUN_CMD_NEXUS} ${EXECdir}/nexus -c NEXUS_Config.rc -r grid_spec.nc -o NEXUS_Expt_split.nc ${REDIRECT_OUT_ERR} -export err=$? 
-if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_chk -else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to execute nexus standalone for the FV3LAM failed." - fi -fi -POST_STEP +export pgm="nexus" +. prep_step +eval ${RUN_CMD_NEXUS} ${EXECdir}/$pgm -c NEXUS_Config.rc -r grid_spec.nc -o NEXUS_Expt_split.nc >>$pgmout 2>${DATA}/errfile +export err=$?; err_chk +if [ $err -ne 0 ]; then + print_err_msg_exit "Call to execute nexus failed." +fi # #----------------------------------------------------------------------- # @@ -341,15 +327,12 @@ POST_STEP # #----------------------------------------------------------------------- # -python3 ${ARL_NEXUS_DIR}/utils/python/make_nexus_output_pretty.py --src ${DATA}/NEXUS_Expt_split.nc --grid ${DATA}/grid_spec.nc -o ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc -t ${DATA}/HEMCO_sa_Time.rc +${USHsrw}/nexus_utils/python/make_nexus_output_pretty.py --src ${DATA}/NEXUS_Expt_split.nc --grid ${DATA}/grid_spec.nc -o ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc -t ${DATA}/HEMCO_sa_Time.rc export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"make_nexus_output_pretty.py\" failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "wcoss2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi # #----------------------------------------------------------------------- diff --git a/scripts/exregional_nexus_gfs_sfc.sh b/scripts/exsrw_nexus_gfs_sfc.sh similarity index 66% rename from scripts/exregional_nexus_gfs_sfc.sh rename to scripts/exsrw_nexus_gfs_sfc.sh index c34d2c30ae..cadc27b89c 100755 --- a/scripts/exregional_nexus_gfs_sfc.sh +++ b/scripts/exsrw_nexus_gfs_sfc.sh @@ -7,8 +7,11 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP} +. ${USHsrw}/source_util_funcs.sh +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -17,7 +20,7 @@ source_config_for_task "cpl_aqm_parm|task_nexus_gfs_sfc" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -52,10 +55,10 @@ data files from disk or HPSS. 
# #----------------------------------------------------------------------- # -yyyymmdd=${GFS_SFC_CDATE:0:8} -yyyymm=${GFS_SFC_CDATE:0:6} -yyyy=${GFS_SFC_CDATE:0:4} -hh=${GFS_SFC_CDATE:8:2} +YYYYMMDD=${GFS_SFC_CDATE:0:8} +YYYYMM=${GFS_SFC_CDATE:0:6} +YYYY=${GFS_SFC_CDATE:0:4} +HH=${GFS_SFC_CDATE:8:2} if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then cyc_mod=$(( ${cyc} - ${DATE_FIRST_CYCL:8:2} )) @@ -70,10 +73,10 @@ fcst_len_hrs_offset=$(( FCST_LEN_HRS + TIME_OFFSET_HRS )) # #----------------------------------------------------------------------- # -GFS_SFC_TAR_DIR="${NEXUS_GFS_SFC_ARCHV_DIR}/rh${yyyy}/${yyyymm}/${yyyymmdd}" -GFS_SFC_TAR_SUB_DIR="gfs.${yyyymmdd}/${hh}/atmos" +GFS_SFC_TAR_DIR="${NEXUS_GFS_SFC_ARCHV_DIR}/rh${YYYY}/${YYYYMM}/${YYYYMMDD}" +GFS_SFC_TAR_SUB_DIR="gfs.${YYYYMMDD}/${HH}/atmos" -if [ "${DO_REAL_TIME}" = "TRUE" ]; then +if [ $(boolify "${DO_REAL_TIME}") = "TRUE" ]; then GFS_SFC_LOCAL_DIR="${COMINgfs}/${GFS_SFC_TAR_SUB_DIR}" else GFS_SFC_LOCAL_DIR="${NEXUS_GFS_SFC_DIR}/${GFS_SFC_TAR_SUB_DIR}" @@ -83,40 +86,28 @@ GFS_SFC_DATA_INTVL="3" # copy files from local directory if [ -d ${GFS_SFC_LOCAL_DIR} ]; then - gfs_sfc_fn="gfs.t${hh}z.sfcanl.nc" + gfs_sfc_fn="gfs.t${HH}z.sfcanl.nc" - relative_link_flag="FALSE" gfs_sfc_fp="${GFS_SFC_LOCAL_DIR}/${gfs_sfc_fn}" - create_symlink_to_file target="${gfs_sfc_fp}" symlink="${gfs_sfc_fn}" \ - relative="${relative_link_flag}" + ln -sf ${gfs_sfc_fp} ${DATA_SHARE}/${gfs_sfc_fn} for fhr in $(seq -f "%03g" 0 ${GFS_SFC_DATA_INTVL} ${fcst_len_hrs_offset}); do - gfs_sfc_fn="gfs.t${hh}z.sfcf${fhr}.nc" + gfs_sfc_fn="gfs.t${HH}z.sfcf${fhr}.nc" if [ -e "${GFS_SFC_LOCAL_DIR}/${gfs_sfc_fn}" ]; then gfs_sfc_fp="${GFS_SFC_LOCAL_DIR}/${gfs_sfc_fn}" - create_symlink_to_file target="${gfs_sfc_fp}" symlink="${gfs_sfc_fn}" \ - relative="${relative_link_flag}" + ln -nsf ${gfs_sfc_fp} ${DATA_SHARE}/${gfs_sfc_fn} else message_txt="SFC file for nexus emission for \"${cycle}\" does not exist in the directory: GFS_SFC_LOCAL_DIR = \"${GFS_SFC_LOCAL_DIR}\" gfs_sfc_fn = \"${gfs_sfc_fn}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - message_warning="WARNING: ${message_txt}" - print_info_msg "${message_warning}" - if [ ! 
-z "${maillist}" ]; then - echo "${message_warning}" | mail.py $maillist - fi - else - print_err_msg_exit "${message_txt}" - fi + print_err_msg_exit "${message_txt}" fi - done - + done # retrieve files from HPSS else - if [ "${yyyymmdd}" -lt "20220627" ]; then + if [ "${YYYYMMDD}" -lt "20220627" ]; then GFS_SFC_TAR_FN_VER="prod" - elif [ "${yyyymmdd}" -lt "20221129" ]; then + elif [ "${YYYYMMDD}" -lt "20221129" ]; then GFS_SFC_TAR_FN_VER="v16.2" else GFS_SFC_TAR_FN_VER="v16.3" @@ -126,63 +117,51 @@ else GFS_SFC_TAR_FN_SUFFIX_B="gfs_ncb.tar" # Check if the sfcanl file exists in the staging directory - gfs_sfc_tar_fn="${GFS_SFC_TAR_FN_PREFIX}.${yyyymmdd}_${hh}.${GFS_SFC_TAR_FN_SUFFIX_A}" + gfs_sfc_tar_fn="${GFS_SFC_TAR_FN_PREFIX}.${YYYYMMDD}_${HH}.${GFS_SFC_TAR_FN_SUFFIX_A}" gfs_sfc_tar_fp="${GFS_SFC_TAR_DIR}/${gfs_sfc_tar_fn}" - gfs_sfc_fns=("gfs.t${hh}z.sfcanl.nc") - gfs_sfc_fps="./${GFS_SFC_TAR_SUB_DIR}/gfs.t${hh}z.sfcanl.nc" + gfs_sfc_fns=("gfs.t${HH}z.sfcanl.nc") + gfs_sfc_fps="./${GFS_SFC_TAR_SUB_DIR}/gfs.t${HH}z.sfcanl.nc" if [ "${fcst_len_hrs_offset}" -lt "40" ]; then ARCHV_LEN_HRS="${fcst_len_hrs_offset}" else ARCHV_LEN_HRS="39" fi for fhr in $(seq -f "%03g" 0 ${GFS_SFC_DATA_INTVL} ${ARCHV_LEN_HRS}); do - gfs_sfc_fns+="gfs.t${hh}z.sfcf${fhr}.nc" - gfs_sfc_fps+=" ./${GFS_SFC_TAR_SUB_DIR}/gfs.t${hh}z.sfcf${fhr}.nc" + gfs_sfc_fns+="gfs.t${HH}z.sfcf${fhr}.nc" + gfs_sfc_fps+=" ./${GFS_SFC_TAR_SUB_DIR}/gfs.t${HH}z.sfcf${fhr}.nc" done # Retrieve data from A file up to fcst_len_hrs_offset=39 htar -tvf ${gfs_sfc_tar_fp} - PREP_STEP - htar -xvf ${gfs_sfc_tar_fp} ${gfs_sfc_fps} ${REDIRECT_OUT_ERR} + htar -xvf ${gfs_sfc_tar_fp} ${gfs_sfc_fps} export err=$? if [ $err -ne 0 ]; then message_txt="htar file reading operation (\"htar -xvf ...\") failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + print_err_msg_exit "${message_txt}" fi - POST_STEP # Retireve data from B file when fcst_len_hrs_offset>=40 if [ "${fcst_len_hrs_offset}" -ge "40" ]; then - gfs_sfc_tar_fn="${GFS_SFC_TAR_FN_PREFIX}.${yyyymmdd}_${hh}.${GFS_SFC_TAR_FN_SUFFIX_B}" + gfs_sfc_tar_fn="${GFS_SFC_TAR_FN_PREFIX}.${YYYYMMDD}_${HH}.${GFS_SFC_TAR_FN_SUFFIX_B}" gfs_sfc_tar_fp="${GFS_SFC_TAR_DIR}/${gfs_sfc_tar_fn}" gfs_sfc_fns=() gfs_sfc_fps="" for fhr in $(seq -f "%03g" 42 ${GFS_SFC_DATA_INTVL} ${fcst_len_hrs_offset}); do - gfs_sfc_fns+="gfs.t${hh}z.sfcf${fhr}.nc" - gfs_sfc_fps+=" ./${GFS_SFC_TAR_SUB_DIR}/gfs.t${hh}z.sfcf${fhr}.nc" + gfs_sfc_fns+="gfs.t${HH}z.sfcf${fhr}.nc" + gfs_sfc_fps+=" ./${GFS_SFC_TAR_SUB_DIR}/gfs.t${HH}z.sfcf${fhr}.nc" done htar -tvf ${gfs_sfc_tar_fp} - PREP_STEP - htar -xvf ${gfs_sfc_tar_fp} ${gfs_sfc_fps} ${REDIRECT_OUT_ERR} + htar -xvf ${gfs_sfc_tar_fp} ${gfs_sfc_fps} export err=$? if [ $err -ne 0 ]; then message_txt="htar file reading operation (\"htar -xvf ...\") failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + print_err_msg_exit "${message_txt}" fi - POST_STEP fi # Link retrieved files to staging directory - ln_vrfy -sf ${GFS_SFC_TAR_SUB_DIR}/gfs.*.nc . 
+ ln -sf ${DATA}/${GFS_SFC_TAR_SUB_DIR}/gfs.*.nc ${DATA_SHARE} +fi -fi # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_nexus_post_split.sh b/scripts/exsrw_nexus_post_split.sh similarity index 71% rename from scripts/exregional_nexus_post_split.sh rename to scripts/exsrw_nexus_post_split.sh index 390e0dcce6..151e0a2ea5 100755 --- a/scripts/exregional_nexus_post_split.sh +++ b/scripts/exsrw_nexus_post_split.sh @@ -7,8 +7,11 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_FP} +. ${USHsrw}/source_util_funcs.sh +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -17,7 +20,7 @@ source_config_for_task "cpl_aqm_parm|task_nexus_post_split" ${GLOBAL_VAR_DEFNS_F # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -42,7 +45,7 @@ print_info_msg " Entering script: \"${scrfunc_fn}\" In directory: \"${scrfunc_dir}\" -This is the ex-script for the task that runs NEXUS. +This is the ex-script for the task that runs NEXUS POST SPLIT. ========================================================================" # #----------------------------------------------------------------------- @@ -53,10 +56,10 @@ This is the ex-script for the task that runs NEXUS. # eval ${PRE_TASK_CMDS} -mm="${PDY:4:2}" -dd="${PDY:6:2}" -hh="${cyc}" -yyyymmdd="${PDY}" +YYYYMMDD="${PDY}" +MM="${PDY:4:2}" +DD="${PDY:6:2}" +HH="${cyc}" NUM_SPLIT_NEXUS=$( printf "%02d" ${NUM_SPLIT_NEXUS} ) @@ -65,9 +68,8 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then CYCLE_IDX=$(( ${cyc_mod} / ${INCR_CYCL_FREQ} )) FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi -start_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC" "+%Y%m%d%H" ) -end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${FCST_LEN_HRS} hours" "+%Y%m%d%H" ) - +start_date=${YYYYMMDD}${HH} +end_date=`$NDATE +${FCST_LEN_HRS} ${YYYYMMDD}${HH}` # #----------------------------------------------------------------------- # @@ -75,25 +77,21 @@ end_date=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${FCST_LEN_HRS} hou # #----------------------------------------------------------------------- # -cp_vrfy ${ARL_NEXUS_DIR}/config/cmaq/HEMCO_sa_Time.rc ${DATA}/HEMCO_sa_Time.rc +cp -p ${PARMsrw}/nexus_config/cmaq/HEMCO_sa_Time.rc ${DATA}/HEMCO_sa_Time.rc +cp -p ${FIXaqm}/nexus/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc -cp_vrfy ${NEXUS_FIX_DIR}/${NEXUS_GRID_FN} ${DATA}/grid_spec.nc if [ "${NUM_SPLIT_NEXUS}" = "01" ]; then nspt="00" - cp_vrfy ${COMIN}/NEXUS/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc ${DATA}/NEXUS_Expt_combined.nc + cp -p ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.${nspt}.nc ${DATA}/NEXUS_Expt_combined.nc else - python3 ${ARL_NEXUS_DIR}/utils/python/concatenate_nexus_post_split.py "${COMIN}/NEXUS/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.*.nc" "${DATA}/NEXUS_Expt_combined.nc" + ${USHsrw}/nexus_utils/python/concatenate_nexus_post_split.py "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt_split.*.nc" "${DATA}/NEXUS_Expt_combined.nc" export
err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"concatenate_nexus_post_split.py\" failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi fi - # #----------------------------------------------------------------------- # @@ -101,17 +99,13 @@ fi # #----------------------------------------------------------------------- # -python3 ${ARL_NEXUS_DIR}/utils/combine_ant_bio.py "${DATA}/NEXUS_Expt_combined.nc" ${DATA}/NEXUS_Expt.nc +${USHsrw}/nexus_utils/combine_ant_bio.py "${DATA}/NEXUS_Expt_combined.nc" ${DATA}/NEXUS_Expt.nc export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"NEXUS_Expt_pretty.py\" failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi - # #----------------------------------------------------------------------- # @@ -119,7 +113,7 @@ fi # #----------------------------------------------------------------------- # -mv_vrfy ${DATA}/NEXUS_Expt.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc +cp -p ${DATA}/NEXUS_Expt.nc ${COMOUT}/${NET}.${cycle}${dot_ensmem}.NEXUS_Expt.nc # # Print message indicating successful completion of script. # diff --git a/scripts/exregional_point_source.sh b/scripts/exsrw_point_source.sh similarity index 81% rename from scripts/exregional_point_source.sh rename to scripts/exsrw_point_source.sh index aeec8f3925..4cd693506c 100755 --- a/scripts/exregional_point_source.sh +++ b/scripts/exsrw_point_source.sh @@ -7,8 +7,12 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh -source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_VAR_DEFNS_FP} +. ${USHsrw}/source_util_funcs.sh +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_point_source task_run_fcst ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -17,7 +21,7 @@ source_config_for_task "task_run_fcst|cpl_aqm_parm|task_point_source" ${GLOBAL_V # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -59,16 +63,15 @@ if [ ${#FCST_LEN_CYCL[@]} -gt 1 ]; then FCST_LEN_HRS=${FCST_LEN_CYCL[$CYCLE_IDX]} fi nstep=$(( FCST_LEN_HRS+1 )) -yyyymmddhh="${PDY}${cyc}" - +YYYYMMDDHH="${PDY}${cyc}" # #----------------------------------------------------------------------- # -# Set the directories for CONUS/HI/AK +# Path to the point source data files # #----------------------------------------------------------------------- # -PT_SRC_PRECOMB="${DCOMINpt_src}" +PT_SRC_PRECOMB="${FIXemis}/${PT_SRC_SUBDIR}" # #----------------------------------------------------------------------- # @@ -76,22 +79,17 @@ PT_SRC_PRECOMB="${DCOMINpt_src}" # #----------------------------------------------------------------------- # -if [ ! -s "${DATA}/pt-${yyyymmddhh}.nc" ]; then - python3 ${HOMEdir}/sorc/AQM-utils/python_utils/stack-pt-merge.py -s ${yyyymmddhh} -n ${nstep} -i ${PT_SRC_PRECOMB} +if [ ! 
-s "${DATA}/pt-${YYYYMMDDHH}.nc" ]; then + ${USHsrw}/aqm_utils_python/stack-pt-merge.py -s ${YYYYMMDDHH} -n ${nstep} -i ${PT_SRC_PRECOMB} export err=$? if [ $err -ne 0 ]; then message_txt="Call to python script \"stack-pt-merge.py\" failed." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi + err_exit "${message_txt}" + print_err_msg_exit "${message_txt}" fi fi - # Move to COMIN -mv_vrfy ${DATA}/pt-${yyyymmddhh}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.PT.nc - +mv ${DATA}/pt-${YYYYMMDDHH}.nc ${COMOUT}/${NET}.${cycle}${dot_ensmem}.PT.nc # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_post_stat_o3.sh b/scripts/exsrw_post_stat_o3.sh similarity index 80% rename from scripts/exregional_post_stat_o3.sh rename to scripts/exsrw_post_stat_o3.sh index 94306d7336..dfcdd24ffa 100755 --- a/scripts/exregional_post_stat_o3.sh +++ b/scripts/exsrw_post_stat_o3.sh @@ -7,8 +7,12 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_VAR_DEFNS_FP} +. ${USHsrw}/source_util_funcs.sh +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -17,7 +21,7 @@ source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_o3" ${GLOBAL_V # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -74,7 +78,7 @@ if [ "${PREDEF_GRID_NAME}" = "AQM_NA_13km" ]; then id_domain=793 fi -ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.nc . +ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.nc . 
# cat >aqm_post.ini < filesize export XLFRTEOPTS="unit_vars=yes" @@ -145,18 +143,18 @@ for grid in 227 196 198;do export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.${hr}ho3.${grid}.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_${hr}hr_o3-awpozcon.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_${hr}hr_o3-awpozcon.${cycle}.${grid} done for var in 1ho3 8ho3;do - cp_vrfy ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT} - cp_vrfy ${DATA}/awpaqm.${cycle}.${var}*grib2 ${COMOUTwmo} + cp ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT} + cp ${DATA}/awpaqm.${cycle}.${var}*grib2 ${COMOUTwmo} done for var in awpozcon;do - cp_vrfy ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT} + cp ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT} done else for var in 1ho3 awpozcon;do - cp_vrfy ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT} + cp ${DATA}/${NET}.${cycle}.${var}*grib2 ${COMOUT} done fi done @@ -166,7 +164,7 @@ done #------------------------------------------------------------ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then - ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.nc a.nc + ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.nc a.nc export chk=1 export chk1=1 @@ -185,10 +183,10 @@ EOF1 ## 06z needs b.nc to find current day output from 04Z to 06Z if [ "${cyc}" = "06" ]; then - if [ -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then - ln_vrfy -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc + if [ -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc ]; then + ln -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc b.nc elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc + ln -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc chk=0 else flag_run_bicor_max=no @@ -197,20 +195,20 @@ EOF1 if [ "${cyc}" = "12" ]; then ## 12z needs b.nc to find current day output from 04Z to 06Z - if [ -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then - ln_vrfy -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc + if [ -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc ]; then + ln -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc b.nc elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc + ln -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc chk=0 else flag_run_bicor_max=no fi ## 12z needs c.nc to find current day output from 07Z to 12z - if [ -s ${COMIN}/../06/${NET}.t06z.chem_sfc.nc ]; then - ln_vrfy -s ${COMIN}/../06/${NET}.t06z.chem_sfc.nc c.nc + if [ -s ${COMIN}/06/${NET}.t06z.chem_sfc.nc ]; then + ln -s ${COMIN}/06/${NET}.t06z.chem_sfc.nc c.nc elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc c.nc + ln -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc c.nc chk1=0 else flag_run_bicor_max=no @@ -220,13 +218,7 @@ EOF1 PREP_STEP eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} export err=$? - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_chk - else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to executable to run AQM_POST_MAXI_GRIB2 returned with nonzero exit code." 
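The chk/chk1 bookkeeping above encodes which earlier chem_sfc files the bias-correction step can see: b.nc should be the same day's 00Z output and c.nc the same day's 06Z output, each falling back to yesterday's 12Z run, with the whole step skipped when neither exists. A condensed sketch of that decision (paths as in the script; the try_link helper is illustrative):

    try_link() {  # try_link <primary> <fallback> <linkname>; prints 1, 0, or skip
      if   [ -s "$1" ]; then ln -sf "$1" "$3"; echo 1   # same-day file found
      elif [ -s "$2" ]; then ln -sf "$2" "$3"; echo 0   # previous-day fallback
      else                                    echo skip
      fi
    }
    chk=$( try_link "${COMIN}/00/${NET}.t00z.chem_sfc.nc" \
                    "${COMINm1}/12/${NET}.t12z.chem_sfc.nc" b.nc )
    [ "${chk}" = "skip" ] && flag_run_bicor_max=no

The resulting chk/chk1 values are passed straight to aqm_post_maxi_grib2, presumably so the executable knows which of its inputs are genuine same-day data.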
- fi - fi POST_STEP # split into max_1h and max_8h files and copy to grib227 @@ -234,7 +226,6 @@ EOF1 wgrib2 aqm-maxi.${id_domain}.grib2 |grep "OZMAX8" | wgrib2 -i aqm-maxi.${id_domain}.grib2 -grib ${NET}.${cycle}.max_8hr_o3.${id_domain}.grib2 grid227="lambert:265.0000:25.0000:25.0000 226.5410:1473:5079.000 12.1900:1025:5079.000" - #export grid148="lambert:263.0000:33.0000:45.0000 239.3720:442:12000.000 21.8210:265:12000.000" grid196="mercator:20.0000 198.4750:321:2500.000:206.1310 18.0730:255:2500.000:23.0880" grid198="nps:210.0000:60.0000 181.4290:825:5953.000 40.5300:553:5953.000" @@ -243,7 +234,7 @@ EOF1 wgrib2 ${NET}.${cycle}.max_8hr_o3.${id_domain}.grib2 -set_grib_type c3b -new_grid_winds earth -new_grid ${!gg} ${NET}.${cycle}.max_8hr_o3.${grid}.grib2 wgrib2 ${NET}.${cycle}.max_1hr_o3.${id_domain}.grib2 -set_grib_type c3b -new_grid_winds earth -new_grid ${!gg} ${NET}.${cycle}.max_1hr_o3.${grid}.grib2 - cp_vrfy ${DATA}/${NET}.${cycle}.max_*hr_o3.*.grib2 ${COMOUT} + cp ${DATA}/${NET}.${cycle}.max_*hr_o3.*.grib2 ${COMOUT} if [ "$SENDDBN" = "TRUE" ]; then ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_1hr_o3.${grid}.grib2 ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUT}/${NET}.${cycle}.max_8hr_o3.${grid}.grib2 @@ -257,24 +248,23 @@ EOF1 export FORT12="filesize" export FORT31= export FORT51=aqm-${hr}hro3-maxi.${grid}.grib2.temp - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.${grid} echo `ls -l aqm-${hr}hro3-maxi.${grid}.grib2.temp | awk '{print $5} '` > filesize export XLFRTEOPTS="unit_vars=yes" export FORT11=aqm-${hr}hro3-maxi.${grid}.grib2.temp export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.${hr}ho3-max.${grid}.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm-${hr}hro3-maxi.${cycle}.${grid} done - cp_vrfy awpaqm.${cycle}.*o3-max.${grid}.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.*o3-max.${grid}.grib2 ${COMOUTwmo} if [ "${SENDDBN_NTC}" = "TRUE" ]; then ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1ho3-max.${grid}.grib2 ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.8ho3-max.${grid}.grib2 fi done fi - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_post_stat_pm25.sh b/scripts/exsrw_post_stat_pm25.sh similarity index 78% rename from scripts/exregional_post_stat_pm25.sh rename to scripts/exsrw_post_stat_pm25.sh index dc054b87a3..bdbf1fcbc5 100755 --- a/scripts/exregional_post_stat_pm25.sh +++ b/scripts/exsrw_post_stat_pm25.sh @@ -7,8 +7,12 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh -source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL_VAR_DEFNS_FP} +. ${USHsrw}/source_util_funcs.sh +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -17,7 +21,7 @@ source_config_for_task "cpl_aqm_parm|task_run_post|task_post_stat_pm25" ${GLOBAL # #----------------------------------------------------------------------- # -{ save_shell_opts; . 
$USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -62,7 +66,6 @@ else print_info_msg "$VERBOSE" " All executables will be submitted with command \'${RUN_CMD_SERIAL}\'." fi - # #----------------------------------------------------------------------- # @@ -79,7 +82,7 @@ fi # aqm_pm25_post #--------------------------------------------------------------- -ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.nc . +ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.nc . cat >aqm_post.ini <> ${NET}.${cycle}.1hpm25.${id_domain}.grib2 @@ -115,7 +112,7 @@ for grid in 227 196 198; do wgrib2 ${NET}.${cycle}.1hpm25.${id_domain}.grib2 -set_grib_type c3b -new_grid_winds earth -new_grid ${!gg} ${NET}.${cycle}.1hpm25.${grid}.grib2 done -cp_vrfy ${DATA}/${NET}.${cycle}*pm25*.grib2 ${COMOUT} +cp ${DATA}/${NET}.${cycle}*pm25*.grib2 ${COMOUT} # Create AWIPS GRIB2 data for Bias-Corrected PM2.5 if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then @@ -126,7 +123,7 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=${NET}.${cycle}.1hpm25.${grid}.grib2.temp - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_1hpm25.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_1hpm25.${cycle}.${grid} echo `ls -l ${NET}.${cycle}.grib2_pm25.${grid}.temp | awk '{print $5} '` > filesize export XLFRTEOPTS="unit_vars=yes" @@ -134,16 +131,16 @@ if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.1hpm25.${grid}.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_1hpm25.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_1hpm25.${cycle}.${grid} # Post Files to COMOUTwmo - cp_vrfy awpaqm.${cycle}.1hpm25.${grid}.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.1hpm25.${grid}.grib2 ${COMOUTwmo} # Distribute Data - if [ "${SENDDBN_NTC}" = "TRUE" ] ; then - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1hpm25.${grid}.grib2 - ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 - fi +# if [ "${SENDDBN_NTC}" = "TRUE" ] ; then +# ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1hpm25.${grid}.grib2 +# ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 +# fi done fi @@ -152,7 +149,7 @@ fi #--------------------------------------------------------------- if [ "${cyc}" = "06" ] || [ "${cyc}" = "12" ]; then - ln_vrfy -sf ${COMIN}/${NET}.${cycle}.chem_sfc.nc a.nc + ln -sf ${COMIN}/${cyc}/${NET}.${cycle}.chem_sfc.nc a.nc export chk=1 export chk1=1 @@ -170,10 +167,10 @@ EOF1 flag_run_bicor_max=yes # 06z needs b.nc to find current day output from 04Z to 06Z if [ "${cyc}" = "06" ]; then - if [ -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc + if [ -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc ]; then + ln -sf ${COMIN}/00/${NET}.t00z.chem_sfc.nc b.nc elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc + ln -sf ${COMINm1}/12/${NET}.t12z.chem_sfc.nc b.nc chk=0 else flag_run_bicor_max=no @@ -182,20 +179,20 @@ EOF1 if [ "${cyc}" = "12" ]; then # 12z needs b.nc to find current day output from 04Z to 06Z - if [ -s ${COMIN}/../00/${NET}.t00z.chem_sfc.nc ]; then - ln_vrfy -sf 
${COMIN}/../00/${NET}.t00z.chem_sfc.nc b.nc + if [ -s ${COMIN}/00/${NET}.t00z.chem_sfc.nc ]; then + ln -sf ${COMIN}/00/${NET}.t00z.chem_sfc.nc b.nc elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMINm1}/12/${NET}.${PDYm1}.t12z.chem_sfc.nc b.nc + ln -sf ${COMINm1}/12/${NET}.${PDYm1}.t12z.chem_sfc.nc b.nc chk=0 else flag_run_bicor_max=no fi # 12z needs c.nc to find current day output from 07Z to 12z - if [ -s ${COMIN}/../06/${NET}.t06z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMIN}/../06/${NET}.t06z.chem_sfc.nc c.nc + if [ -s ${COMIN}/06/${NET}.t06z.chem_sfc.nc ]; then + ln -sf ${COMIN}/06/${NET}.t06z.chem_sfc.nc c.nc elif [ -s ${COMINm1}/12/${NET}.t12z.chem_sfc.nc ]; then - ln_vrfy -sf ${COMINm1}/12/${NET}.t12z.chem_sfc.nc c.nc + ln -sf ${COMINm1}/12/${NET}.t12z.chem_sfc.nc c.nc chk1=0 else flag_run_bicor_max=no @@ -205,13 +202,7 @@ EOF1 PREP_STEP eval ${RUN_CMD_SERIAL} ${EXECdir}/aqm_post_maxi_grib2 ${PDY} ${cyc} ${chk} ${chk1} ${REDIRECT_OUT_ERR} export err=$? - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then err_chk - else - if [ $err -ne 0 ]; then - print_err_msg_exit "Call to executable to run AQM_POST_MAXI_GRIB2 returned with nonzero exit code." - fi - fi POST_STEP wgrib2 ${NET}_pm25_24h_ave.${id_domain}.grib2 |grep "PMTF" | wgrib2 -i ${NET}_pm25_24h_ave.${id_domain}.grib2 -grib ${NET}.${cycle}.ave_24hr_pm25.${id_domain}.grib2 @@ -228,14 +219,14 @@ EOF1 wgrib2 ${NET}.${cycle}.max_1hr_pm25.${id_domain}.grib2 -set_grib_type c3b -new_grid_winds earth -new_grid ${!gg} ${NET}.${cycle}.1hpm25-max.${grid}.grib2 # Add WMO header for daily 1h PM2.5 and 24hr_ave PM2.5 - rm_vrfy -f filesize + rm -f filesize echo 0 > filesize export XLFRTEOPTS="unit_vars=yes" export FORT11=${NET}.${cycle}.1hpm25-max.${grid}.grib2 export FORT12="filesize" export FORT31= export FORT51=${NET}.${cycle}.max_1hr_pm25.${grid}.grib2.temp - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_max_1hr_pm25.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_max_1hr_pm25.${cycle}.${grid} echo `ls -l ${NET}.${cycle}.max_1hr_pm25.${grid}.grib2.temp | awk '{print $5} '` > filesize export XLFRTEOPTS="unit_vars=yes" @@ -243,16 +234,16 @@ EOF1 export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_max_1hr_pm25.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_max_1hr_pm25.${cycle}.${grid} - rm_vrfy -f filesize + rm -f filesize echo 0 > filesize export XLFRTEOPTS="unit_vars=yes" export FORT11=${NET}.${cycle}.24hrpm25-ave.${grid}.grib2 export FORT12="filesize" export FORT31= export FORT51=${NET}.${cycle}.24hrpm25-ave.${grid}.grib2.temp - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_24hrpm25_awp.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_24hrpm25_awp.${cycle}.${grid} echo `ls -l ${NET}.${cycle}.24hrpm25-ave.${grid}.grib2.temp | awk '{print $5} '` > filesize export XLFRTEOPTS="unit_vars=yes" @@ -260,20 +251,28 @@ EOF1 export FORT12="filesize" export FORT31= export FORT51=awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 - tocgrib2super < ${PARMaqm_utils}/wmo/grib2_aqm_ave_24hrpm25_awp.${cycle}.${grid} + tocgrib2super < ${PARMdir}/aqm_utils/wmo/grib2_aqm_ave_24hrpm25_awp.${cycle}.${grid} - cp_vrfy ${DATA}/${NET}.${cycle}.ave_24hr_pm25*.grib2 ${COMOUT} - cp_vrfy ${DATA}/${NET}.${cycle}.max_1hr_pm25*.grib2 ${COMOUT} - cp_vrfy awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 ${COMOUTwmo} - cp_vrfy awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 
${COMOUTwmo} + cp ${DATA}/${NET}.${cycle}.ave_24hr_pm25*.grib2 ${COMOUT} + cp ${DATA}/${NET}.${cycle}.max_1hr_pm25*.grib2 ${COMOUT} + cp awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 ${COMOUTwmo} + cp awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 ${COMOUTwmo} + + ############################## + # Distribute Data + ############################## + + if [ "${SENDDBN_NTC}" = "TRUE" ] ; then + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.1hpm25.${grid}.grib2 + ${DBNROOT}/bin/dbn_alert ${DBNALERT_TYPE} ${NET} ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 + fi if [ "$SENDDBN" = "TRUE" ]; then - ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUTwmo}/${NET}.${cycle}.ave_24hr_pm25.${grid}.grib2 - ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUTwmo}/${NET}.${cycle}.max_1hr_pm25.${grid}.grib2 + ${DBNROOT}/bin/dbn_alert MODEL AQM_PM ${job} ${COMOUTwmo}/awpaqm.${cycle}.24hr-pm25-ave.${grid}.grib2 + ${DBNROOT}/bin/dbn_alert MODEL AQM_MAX ${job} ${COMOUTwmo}/awpaqm.${cycle}.daily-1hr-pm25-max.${grid}.grib2 fi done fi - # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_pre_post_stat.sh b/scripts/exsrw_pre_post_stat.sh similarity index 78% rename from scripts/exregional_pre_post_stat.sh rename to scripts/exsrw_pre_post_stat.sh index 44f4637684..f6ec6a9a7d 100755 --- a/scripts/exregional_pre_post_stat.sh +++ b/scripts/exsrw_pre_post_stat.sh @@ -7,8 +7,12 @@ # #----------------------------------------------------------------------- # -. $USHdir/source_util_funcs.sh -source_config_for_task "task_pre_post|task_run_post" ${GLOBAL_VAR_DEFNS_FP} +. ${USHsrw}/source_util_funcs.sh +for sect in user nco platform workflow nco global verification cpl_aqm_parm \ + constants fixed_files grid_params \ + task_run_post ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done # #----------------------------------------------------------------------- # @@ -17,7 +21,7 @@ source_config_for_task "task_pre_post|task_run_post" ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # -{ save_shell_opts; . 
$USHdir/preamble.sh; } > /dev/null 2>&1 +{ save_shell_opts; set -xue; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -63,11 +67,11 @@ ist=1 while [ "$ist" -le "${FCST_LEN_HRS}" ]; do hst=$( printf "%03d" "${ist}" ) - rm_vrfy -f ${DATA}/tmp*nc - rm_vrfy -f ${DATA}/${NET}.${cycle}.chem_sfc_f${hst}*nc - rm_vrfy -f ${DATA}/${NET}.${cycle}.met_sfc_f${hst}*nc + rm -f ${DATA}/tmp*nc + rm -f ${DATA}/${NET}.${cycle}.chem_sfc_f${hst}*nc + rm -f ${DATA}/${NET}.${cycle}.met_sfc_f${hst}*nc - ncks -v lat,lon,o3_ave,no_ave,no2_ave,pm25_ave -d pfull,63,63 ${COMIN}/${NET}.${cycle}.dyn.f${hst}.nc ${DATA}/tmp2a.nc + ncks -v lat,lon,o3_ave,no_ave,no2_ave,pm25_ave -d pfull,63,63 ${DATA_SHARE}/${NET}.${cycle}.dyn.f${hst}.nc ${DATA}/tmp2a.nc ncks -C -O -x -v pfull ${DATA}/tmp2a.nc ${DATA}/tmp2b.nc @@ -75,11 +79,11 @@ while [ "$ist" -le "${FCST_LEN_HRS}" ]; do ncrename -v o3_ave,o3 -v no_ave,no -v no2_ave,no2 -v pm25_ave,PM25_TOT ${DATA}/tmp2c.nc - mv_vrfy ${DATA}/tmp2c.nc ${DATA}/${NET}.${cycle}.chem_sfc.f${hst}.nc + mv ${DATA}/tmp2c.nc ${DATA}/${NET}.${cycle}.chem_sfc.f${hst}.nc - ncks -v dswrf,hpbl,tmp2m,ugrd10m,vgrd10m,spfh2m ${COMIN}/${NET}.${cycle}.phy.f${hst}.nc ${DATA}/${NET}.${cycle}.met_sfc.f${hst}.nc + ncks -v dswrf,hpbl,tmp2m,ugrd10m,vgrd10m,spfh2m ${DATA_SHARE}/${NET}.${cycle}.phy.f${hst}.nc ${DATA}/${NET}.${cycle}.met_sfc.f${hst}.nc - ncks -v aod ${COMIN}/${NET}.${cycle}.phy.f${hst}.nc ${DATA}/${NET}.${cycle}.aod.f${hst}.nc + ncks -v aod ${DATA_SHARE}/${NET}.${cycle}.phy.f${hst}.nc ${DATA}/${NET}.${cycle}.aod.f${hst}.nc (( ist=ist+1 )) done @@ -101,7 +105,6 @@ while [ "${ist}" -le "${FCST_LEN_HRS}" ]; do done ncecat ${DATA}/${NET}.${cycle}.chem_sfc.f*.nc ${DATA}/${NET}.${cycle}.chem_sfc.nc - # #----------------------------------------------------------------------- # @@ -109,10 +112,10 @@ ncecat ${DATA}/${NET}.${cycle}.chem_sfc.f*.nc ${DATA}/${NET}.${cycle}.chem_sfc. 
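The extraction loop above chains the standard NCO netCDF operators: ncks subsets variables and picks the lowest model level (-d pfull,63,63), a second ncks (-C -x -v pfull) drops the now-degenerate coordinate, ncrename maps model variable names onto the ones the downstream statistics expect, and ncecat later stacks the hourly files along a new record dimension. The chain for a single hour, with illustrative file names:

    ncks -v lat,lon,o3_ave,no_ave,no2_ave,pm25_ave -d pfull,63,63 \
         dyn.f001.nc tmp2a.nc                  # subset variables, lowest level only
    ncks -C -O -x -v pfull tmp2a.nc tmp2b.nc   # drop the pfull coordinate variable
    ncrename -v o3_ave,o3 -v no_ave,no -v no2_ave,no2 \
             -v pm25_ave,PM25_TOT tmp2b.nc     # rename in place to expected names
    mv tmp2b.nc chem_sfc.f001.nc
    ncecat chem_sfc.f*.nc chem_sfc.nc          # stack hours along a record dimension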
# #----------------------------------------------------------------------- # -mv_vrfy ${DATA}/${NET}.${cycle}.met_sfc.f*.nc ${COMIN} -mv_vrfy ${DATA}/${NET}.${cycle}.chem_sfc.f*.nc ${COMIN} -mv_vrfy ${DATA}/${NET}.${cycle}.chem_sfc.nc ${COMIN} -mv_vrfy ${DATA}/${NET}.${cycle}.aod.f*.nc ${COMIN} +mv ${DATA}/${NET}.${cycle}.met_sfc.f*.nc ${COMOUT} +mv ${DATA}/${NET}.${cycle}.chem_sfc.f*.nc ${COMOUT} +mv ${DATA}/${NET}.${cycle}.chem_sfc.nc ${COMOUT} +mv ${DATA}/${NET}.${cycle}.aod.f*.nc ${COMOUT} # #----------------------------------------------------------------------- # diff --git a/tests/WE2E/machine_suites/comprehensive b/tests/WE2E/machine_suites/comprehensive index 7fdb30046a..8397e5d0c0 100644 --- a/tests/WE2E/machine_suites/comprehensive +++ b/tests/WE2E/machine_suites/comprehensive @@ -1,4 +1,12 @@ 2020_CAD +2020_CAPE +2019_hurricane_barry +2019_halloween_storm +2019_hurricane_lorenzo +2019_memorial_day_heat_wave +2020_denver_radiation_inversion +2020_easter_storm +2020_jan_cold_blast community custom_ESGgrid custom_ESGgrid_Central_Asia_3km @@ -49,6 +57,7 @@ grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta +grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta @@ -63,12 +72,6 @@ MET_ensemble_verification_only_vx MET_ensemble_verification_only_vx_time_lag MET_ensemble_verification_winter_wx MET_verification_only_vx -nco -nco_ensemble -nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 -nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 -nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km -nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR pregen_grid_orog_sfc_climo specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS specify_template_filenames diff --git a/tests/WE2E/machine_suites/comprehensive.cheyenne b/tests/WE2E/machine_suites/comprehensive.cheyenne index e518e0c4cb..96792e37b0 100644 --- a/tests/WE2E/machine_suites/comprehensive.cheyenne +++ b/tests/WE2E/machine_suites/comprehensive.cheyenne @@ -48,12 +48,6 @@ grid_SUBCONUS_Ind_3km_ics_NAM_lbcs_NAM_suite_GFS_v16 grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot MET_ensemble_verification_only_vx MET_verification_only_vx -nco -nco_ensemble -nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 -nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 -nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km -nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR pregen_grid_orog_sfc_climo specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS specify_template_filenames diff --git a/tests/WE2E/machine_suites/comprehensive.derecho b/tests/WE2E/machine_suites/comprehensive.derecho index 5bf5b4db24..5464a053d8 100644 --- a/tests/WE2E/machine_suites/comprehensive.derecho +++ b/tests/WE2E/machine_suites/comprehensive.derecho @@ -1,4 +1,12 @@ 2020_CAD +2020_CAPE +2019_hurricane_barry +2019_halloween_storm +2019_hurricane_lorenzo +2019_memorial_day_heat_wave +2020_denver_radiation_inversion +2020_easter_storm +2020_jan_cold_blast community custom_ESGgrid #custom_ESGgrid_Central_Asia_3km @@ -40,6 +48,7 @@ 
grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta +grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta @@ -52,12 +61,6 @@ grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot MET_ensemble_verification_only_vx MET_ensemble_verification_winter_wx MET_verification_only_vx -nco -nco_ensemble -nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 -nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 -nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km -nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR pregen_grid_orog_sfc_climo specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS specify_template_filenames diff --git a/tests/WE2E/machine_suites/comprehensive.gaea-c5 b/tests/WE2E/machine_suites/comprehensive.gaea similarity index 100% rename from tests/WE2E/machine_suites/comprehensive.gaea-c5 rename to tests/WE2E/machine_suites/comprehensive.gaea diff --git a/tests/WE2E/machine_suites/comprehensive.noaacloud b/tests/WE2E/machine_suites/comprehensive.noaacloud index d44160244a..c9bb96ae64 100644 --- a/tests/WE2E/machine_suites/comprehensive.noaacloud +++ b/tests/WE2E/machine_suites/comprehensive.noaacloud @@ -37,6 +37,7 @@ grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta +grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta @@ -49,16 +50,18 @@ grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot MET_ensemble_verification_only_vx MET_ensemble_verification_winter_wx MET_verification_only_vx -nco -nco_ensemble -nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 -nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 -nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km -nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR pregen_grid_orog_sfc_climo specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS specify_template_filenames 2020_CAD +2020_CAPE +2019_hurricane_barry +2019_halloween_storm +2019_hurricane_lorenzo +2019_memorial_day_heat_wave +2020_denver_radiation_inversion +2020_easter_storm +2020_jan_cold_blast get_from_AWS_ics_GEFS_lbcs_GEFS_fmt_grib2_2022040400_ensemble_2mems get_from_NOMADS_ics_FV3GFS_lbcs_FV3GFS long_fcst diff --git a/tests/WE2E/machine_suites/comprehensive.orion b/tests/WE2E/machine_suites/comprehensive.orion index 671756e294..5930843582 100644 --- a/tests/WE2E/machine_suites/comprehensive.orion +++ b/tests/WE2E/machine_suites/comprehensive.orion @@ -1,4 +1,12 @@ 2020_CAD +2020_CAPE +2019_hurricane_barry +2019_halloween_storm +2019_hurricane_lorenzo +2019_memorial_day_heat_wave +2020_denver_radiation_inversion +2020_easter_storm +2020_jan_cold_blast community custom_ESGgrid custom_ESGgrid_Central_Asia_3km @@ -40,6 +48,7 @@ 
grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_RRFS_CONUScompact_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta +grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta @@ -52,12 +61,6 @@ grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot MET_ensemble_verification_only_vx MET_ensemble_verification_winter_wx MET_verification_only_vx -nco -nco_ensemble -nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 -nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 -nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km -nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR pregen_grid_orog_sfc_climo specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS specify_template_filenames diff --git a/tests/WE2E/machine_suites/coverage.cheyenne b/tests/WE2E/machine_suites/coverage.cheyenne index 19bbc623c7..8f3c3ec78c 100644 --- a/tests/WE2E/machine_suites/coverage.cheyenne +++ b/tests/WE2E/machine_suites/coverage.cheyenne @@ -4,6 +4,5 @@ grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR -nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 pregen_grid_orog_sfc_climo specify_template_filenames diff --git a/tests/WE2E/machine_suites/coverage.derecho b/tests/WE2E/machine_suites/coverage.derecho index 19bbc623c7..a948c76033 100644 --- a/tests/WE2E/machine_suites/coverage.derecho +++ b/tests/WE2E/machine_suites/coverage.derecho @@ -4,6 +4,8 @@ grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 grid_RRFS_CONUScompact_13km_ics_HRRR_lbcs_RAP_suite_HRRR grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_HRRR_suite_HRRR -nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 pregen_grid_orog_sfc_climo specify_template_filenames +2019_hurricane_barry +2019_memorial_day_heat_wave +2020_denver_radiation_inversion diff --git a/tests/WE2E/machine_suites/coverage.gaea-c5 b/tests/WE2E/machine_suites/coverage.gaea similarity index 83% rename from tests/WE2E/machine_suites/coverage.gaea-c5 rename to tests/WE2E/machine_suites/coverage.gaea index 4ff7f61f3c..970fdf4086 100644 --- a/tests/WE2E/machine_suites/coverage.gaea-c5 +++ b/tests/WE2E/machine_suites/coverage.gaea @@ -7,5 +7,5 @@ grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_HRRR_suite_HRRR grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot -nco_ensemble -nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km +2020_CAPE +2020_easter_storm diff --git a/tests/WE2E/machine_suites/coverage.hera.gnu.com b/tests/WE2E/machine_suites/coverage.hera.gnu.com index be66d82fb8..c2018a6e78 100644 --- a/tests/WE2E/machine_suites/coverage.hera.gnu.com +++ b/tests/WE2E/machine_suites/coverage.hera.gnu.com @@ -7,4 +7,5 @@ grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_RAP_suite_WoFS_v0 long_fcst MET_verification_only_vx MET_ensemble_verification_only_vx_time_lag 
-nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 +2019_halloween_storm +2020_jan_cold_blast diff --git a/tests/WE2E/machine_suites/coverage.hercules b/tests/WE2E/machine_suites/coverage.hercules index 273de3108e..ec37d81a56 100644 --- a/tests/WE2E/machine_suites/coverage.hercules +++ b/tests/WE2E/machine_suites/coverage.hercules @@ -9,3 +9,4 @@ grid_RRFS_NA_13km_ics_FV3GFS_lbcs_FV3GFS_suite_RAP grid_SUBCONUS_Ind_3km_ics_NAM_lbcs_NAM_suite_GFS_v16 MET_verification_only_vx specify_EXTRN_MDL_SYSBASEDIR_ICS_LBCS +2019_hurricane_lorenzo diff --git a/tests/WE2E/machine_suites/coverage.jet b/tests/WE2E/machine_suites/coverage.jet index a01d095828..53308090b1 100644 --- a/tests/WE2E/machine_suites/coverage.jet +++ b/tests/WE2E/machine_suites/coverage.jet @@ -9,4 +9,3 @@ grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2 grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta -nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR diff --git a/tests/WE2E/machine_suites/coverage.orion b/tests/WE2E/machine_suites/coverage.orion index dd13f27318..5cb4441437 100644 --- a/tests/WE2E/machine_suites/coverage.orion +++ b/tests/WE2E/machine_suites/coverage.orion @@ -5,8 +5,8 @@ grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_RRFS_v1beta grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR +grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta grid_RRFS_CONUScompact_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_RRFS_CONUScompact_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16 grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0 -nco 2020_CAD diff --git a/tests/WE2E/machine_suites/fundamental b/tests/WE2E/machine_suites/fundamental index 858a442253..09d9482c7d 100644 --- a/tests/WE2E/machine_suites/fundamental +++ b/tests/WE2E/machine_suites/fundamental @@ -4,8 +4,6 @@ # Test RRFS_CONUScompact_25km grid, HRRR ics, RAP lbcs, RRFS_v1beta suite grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta -# Test grid_RRFS_CONUS_25km in NCO mode with FV3GFS bcs (6hr time offset), FV3_GFS_v16 suite -nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16 # Test grid_RRFS_CONUS_25km grid, FV3GFS bcs, inline post, GFS_v15p2 suite grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15p2 # Test grid_RRFS_CONUS_25km grid, FV3GFS bcs, restart files, GFS_v17_p8 suite diff --git a/tests/WE2E/run_WE2E_tests.py b/tests/WE2E/run_WE2E_tests.py index 5c720e7d93..5d4bd81105 100755 --- a/tests/WE2E/run_WE2E_tests.py +++ b/tests/WE2E/run_WE2E_tests.py @@ -96,12 +96,6 @@ def run_we2e_tests(homedir, args) -> None: logging.debug(f'{testfilename} exists for this platform and run_envir'\ 'has not been specified\n'\ 'Setting run_envir = {run_envir} for all tests') - else: - if not run_envir: - run_envir = 'nco' - logging.debug(f'{testfilename} exists for this platform and run_envir has'\ - 'not been specified\n'\ - 'Setting run_envir = {run_envir} for all tests') logging.debug(f"Reading test file: {testfilename}") with open(testfilename, encoding="utf-8") as f: tests_to_check = [x.rstrip() for x in f] @@ -175,14 +169,6 @@ def run_we2e_tests(homedir, args) -> None: test_cfg['user'].update({"ACCOUNT": args.account}) if run_envir: test_cfg['user'].update({"RUN_ENVIR": run_envir}) - if run_envir == "nco": - if 'nco' not in test_cfg: - 
test_cfg['nco'] = dict() - test_cfg['nco'].update({"model_ver_default": "we2e"}) - if args.opsroot: - if 'nco' not in test_cfg: - test_cfg['nco'] = dict() - test_cfg['nco'].update({"OPSROOT_default": args.opsroot}) # if platform section was not in input config, initialize as empty dict if 'platform' not in test_cfg: test_cfg['platform'] = dict() @@ -224,10 +210,6 @@ def run_we2e_tests(homedir, args) -> None: logging.debug(f'Setting {obvar} = {mach_path} from machine file') test_cfg['platform'][obvar] = mach_path - if 'cpl_aqm_parm' in test_cfg: - test_aqm_input_basedir = machine_defaults['platform']['TEST_AQM_INPUT_BASEDIR'] - test_cfg['cpl_aqm_parm']['DCOMINfire_default'] = f"{test_aqm_input_basedir}/RAVE_fire" - if args.compiler == "gnu": # 2D decomposition doesn't work with GNU compilers. Deactivate 2D decomposition for GNU if 'task_run_post' in test_cfg: @@ -533,9 +515,6 @@ def setup_logging(logfile: str = "log.run_WE2E_tests", debug: bool = False) -> N help='DEPRECATED; DO NOT USE. See "launch" option.') ap.add_argument('--cron_relaunch_intvl_mnts', type=int, help='Overrides CRON_RELAUNCH_INTVL_MNTS for all experiments') - ap.add_argument('--opsroot', type=str, - help='If test is for NCO mode, sets OPSROOT_default (see config_defaults.yaml'\ - 'for more details on this variable)') ap.add_argument('--print_test_info', action='store_true', help='Create a "WE2E_test_info.txt" file summarizing each test prior to'\ 'starting experiment') diff --git a/tests/WE2E/setup_WE2E_tests.sh b/tests/WE2E/setup_WE2E_tests.sh index 0644102c06..8fa0977af7 100755 --- a/tests/WE2E/setup_WE2E_tests.sh +++ b/tests/WE2E/setup_WE2E_tests.sh @@ -45,7 +45,7 @@ function usage { } -machines=( hera jet cheyenne derecho orion wcoss2 gaea-c5 odin singularity macos noaacloud ) +machines=( hera jet cheyenne derecho orion wcoss2 gaea odin singularity macos noaacloud ) if [ "$1" = "-h" ] ; then usage ; fi [[ $# -le 2 ]] && usage @@ -80,6 +80,7 @@ export HOME=$homedir source ../../ush/load_modules_wflow.sh ${machine} # Run the E2E Workflow tests +[[ ${tests} = none ]] && echo "none" || \ ./run_WE2E_tests.py \ --machine=${machine} \ --account=${account} \ diff --git a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml index 789c5e9674..2901d1ebf1 100644 --- a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml +++ b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml @@ -5,13 +5,10 @@ user: workflow: PREDEF_GRID_NAME: AQM_NA_13km CCPP_PHYS_SUITE: FV3_GFS_v16 - DATE_FIRST_CYCL: '2023021700' - DATE_LAST_CYCL: '2023021706' - INCR_CYCL_FREQ: 6 - FCST_LEN_HRS: -1 - FCST_LEN_CYCL: - - 6 - - 12 + DATE_FIRST_CYCL: '2023111000' + DATE_LAST_CYCL: '2023111100' + INCR_CYCL_FREQ: 24 + FCST_LEN_HRS: 24 PREEXISTING_DIR_METHOD: rename DIAG_TABLE_TMPL_FN: diag_table_aqm.FV3_GFS_v16 FIELD_TABLE_TMPL_FN: field_table_aqm.FV3_GFS_v16 @@ -20,11 +17,11 @@ nco: NET_default: aqm rocoto: tasks: - taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' + taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}' task_aqm_ics_ext: metatask_run_ensemble: task_run_fcst_mem#mem#: - walltime: 02:00:00 + walltime: 01:20:00 task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: netcdf @@ -41,7 +38,7 @@ task_run_fcst: LAYOUT_X: 
diff --git a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml
index 789c5e9674..2901d1ebf1 100644
--- a/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml
+++ b/tests/WE2E/test_configs/aqm/config.aqm_grid_AQM_NA13km_suite_GFS_v16.yaml
@@ -5,13 +5,10 @@ user:
 workflow:
   PREDEF_GRID_NAME: AQM_NA_13km
   CCPP_PHYS_SUITE: FV3_GFS_v16
-  DATE_FIRST_CYCL: '2023021700'
-  DATE_LAST_CYCL: '2023021706'
-  INCR_CYCL_FREQ: 6
-  FCST_LEN_HRS: -1
-  FCST_LEN_CYCL:
-  - 6
-  - 12
+  DATE_FIRST_CYCL: '2023111000'
+  DATE_LAST_CYCL: '2023111100'
+  INCR_CYCL_FREQ: 24
+  FCST_LEN_HRS: 24
   PREEXISTING_DIR_METHOD: rename
   DIAG_TABLE_TMPL_FN: diag_table_aqm.FV3_GFS_v16
   FIELD_TABLE_TMPL_FN: field_table_aqm.FV3_GFS_v16
@@ -20,11 +17,11 @@ nco:
   NET_default: aqm
 rocoto:
   tasks:
-    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}'
+    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}'
     task_aqm_ics_ext:
     metatask_run_ensemble:
       task_run_fcst_mem#mem#:
-        walltime: 02:00:00
+        walltime: 01:20:00
 task_get_extrn_ics:
   EXTRN_MDL_NAME_ICS: FV3GFS
   FV3GFS_FILE_FMT_ICS: netcdf
@@ -41,7 +38,7 @@ task_run_fcst:
   LAYOUT_X: 50
   LAYOUT_Y: 34
   BLOCKSIZE: 16
-  RESTART_INTERVAL: 6
+  RESTART_INTERVAL: 12 24
   QUILTING: true
   PRINT_ESMF: false
   DO_FCST_RESTART: false
diff --git a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Great_Lakes_snow_8km.yaml b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Great_Lakes_snow_8km.yaml
index 981c736239..d773c632e2 100644
--- a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Great_Lakes_snow_8km.yaml
+++ b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_Great_Lakes_snow_8km.yaml
@@ -15,7 +15,7 @@ workflow:
   PREEXISTING_DIR_METHOD: rename
 rocoto:
   tasks:
-    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}'
+    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/test.yaml"]|include }}'
     metatask_run_ensemble:
       task_run_fcst_mem#mem#:
         walltime: 01:00:00
diff --git a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml
index 6d9e2e0d6d..867b4675a0 100644
--- a/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml
+++ b/tests/WE2E/test_configs/custom_grids/config.custom_ESGgrid_SF_1p1km.yaml
@@ -57,7 +57,7 @@ rocoto:
   tasks:
     metatask_run_ensemble:
       task_run_fcst_mem#mem#:
-        walltime: 01:00:00
+        walltime: 02:30:00
     task_make_ics_mem#mem#:
       nnodes: 16
       ppn: 12
diff --git a/tests/WE2E/test_configs/default_configs/config.nco.yaml b/tests/WE2E/test_configs/default_configs/config.nco.yaml
deleted file mode 120000
index 690636fd63..0000000000
--- a/tests/WE2E/test_configs/default_configs/config.nco.yaml
+++ /dev/null
@@ -1 +0,0 @@
-../../../../ush/config.nco.yaml
\ No newline at end of file
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml
index f4c40bf722..de456cea73 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_CONUS_3km_GFDLgrid_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml
@@ -20,3 +20,5 @@ task_get_extrn_lbcs:
   EXTRN_MDL_NAME_LBCS: FV3GFS
   LBC_SPEC_INTVL_HRS: 3
   USE_USER_STAGED_EXTRN_FILES: true
+task_run_fcst:
+  OMP_NUM_THREADS_RUN_FCST: 3
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml
index 7bc578b203..95b57b0aa5 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml
@@ -14,7 +14,7 @@ workflow:
   PREEXISTING_DIR_METHOD: rename
 rocoto:
   tasks:
-    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}'
+    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml", "parm/wflow/test.yaml"]|include }}'
 task_get_extrn_ics:
   EXTRN_MDL_NAME_ICS: FV3GFS
   USE_USER_STAGED_EXTRN_FILES: true
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml
index 6d4dbc3b33..4a340185f3 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_AK_3km_ics_FV3GFS_lbcs_FV3GFS_suite_HRRR.yaml
@@ -19,3 +19,5 @@ task_get_extrn_lbcs:
   EXTRN_MDL_NAME_LBCS: FV3GFS
   LBC_SPEC_INTVL_HRS: 3
   USE_USER_STAGED_EXTRN_FILES: true
+task_run_fcst:
+  OMP_NUM_THREADS_RUN_FCST: 3
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml
index 6c00cb9aab..3ce1146840 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml
@@ -14,7 +14,7 @@ workflow:
   PREEXISTING_DIR_METHOD: rename
 rocoto:
   tasks:
-    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}'
+    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml", "parm/wflow/test.yaml"]|include }}'
 task_get_extrn_ics:
   EXTRN_MDL_NAME_ICS: FV3GFS
   USE_USER_STAGED_EXTRN_FILES: true
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml
index b4b29df9fa..b44dd3eec5 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16_plot.yaml
@@ -14,7 +14,7 @@ workflow:
   PREEXISTING_DIR_METHOD: rename
 rocoto:
   tasks:
-    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}'
+    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml", "parm/wflow/test.yaml"]|include }}'
 task_get_extrn_ics:
   EXTRN_MDL_NAME_ICS: FV3GFS
   USE_USER_STAGED_EXTRN_FILES: true
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v17_p8_plot.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v17_p8_plot.yaml
index fdf9d3333e..a6e7dd9008 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v17_p8_plot.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v17_p8_plot.yaml
@@ -15,7 +15,7 @@ workflow:
   PREEXISTING_DIR_METHOD: rename
 rocoto:
   tasks:
-    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}'
+    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml", "parm/wflow/test.yaml"]|include }}'
 task_get_extrn_ics:
   EXTRN_MDL_NAME_ICS: FV3GFS
   USE_USER_STAGED_EXTRN_FILES: true
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP.yaml
index 1e6c5ea724..4ef328a5db 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_RAP_suite_RAP.yaml
@@ -17,7 +17,7 @@ workflow:
   PREEXISTING_DIR_METHOD: rename
 rocoto:
   tasks:
-    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}'
+    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml","parm/wflow/test.yaml"]|include }}'
     metatask_run_ensemble:
       task_run_fcst_mem#mem#:
         walltime: 01:00:00
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml
index 23ad640390..8e93259539 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml
@@ -3,8 +3,8 @@ metadata:
     This test is to ensure that the workflow running in community mode
     completes successfully on the RRFS_CONUS_25km grid using the GFS_v16
     physics suite with ICs and LBCs derived from the NAM.
-    This test also runs with two ensemble members, and ensures the MET
-    ensemble-specific tasks run successfully.
+    This test also runs with two ensemble members, runs plotting tasks for each
+    ensemble member, and ensures the MET ensemble-specific tasks run successfully.
 user:
   RUN_ENVIR: community
 workflow:
@@ -16,7 +16,7 @@ workflow:
   PREEXISTING_DIR_METHOD: rename
 rocoto:
   tasks:
-    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml"]|include }}'
+    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml", "parm/wflow/test.yaml"]|include }}'
     metatask_run_ensemble:
       task_run_fcst_mem#mem#:
         walltime: 01:00:00
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml
index dd5f5a464a..2e4f1dc22f 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml
@@ -3,11 +3,10 @@ metadata:
     This test is to ensure that the workflow running in community mode
     completes successfully on the RRFS_CONUScompact_25km grid using the RRFS_v1beta
     physics suite with ICs derived from the HRRR and LBCs derived from the RAP.
-    It also tests the "DOT_OR_USCORE" option and enables offline UPP 2D decomposition.
+    It also enables offline UPP 2D decomposition.
 user:
   RUN_ENVIR: community
 workflow:
-  DOT_OR_USCORE: .
   CCPP_PHYS_SUITE: FV3_RRFS_v1beta
   PREDEF_GRID_NAME: RRFS_CONUScompact_25km
   DATE_FIRST_CYCL: '2020081000'
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta.yaml
new file mode 100644
index 0000000000..908b79dc43
--- /dev/null
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_25km_ics_RRFS_lbcs_RRFS_suite_RRFS_v1beta.yaml
@@ -0,0 +1,29 @@
+metadata:
+  description: |-
+    This test is to ensure that the workflow running in community mode
+    completes successfully on the RRFS_CONUScompact_25km grid using the RRFS_v1beta
+    physics suite. It uses RRFS forecasts mapped onto a 3-km regular grid (rrfs*.conus.grib2) for
+    ICs and LBCs. This test uses the old v1 sfc_data, not the v2 fractional grid sfc_data.
+user:
+  RUN_ENVIR: community
+workflow:
+  CCPP_PHYS_SUITE: FV3_RRFS_v1beta
+  PREDEF_GRID_NAME: RRFS_CONUScompact_25km
+  DATE_FIRST_CYCL: '2024060517'
+  DATE_LAST_CYCL: '2024060517'
+  FCST_LEN_HRS: 3
+  PREEXISTING_DIR_METHOD: rename
+task_get_extrn_ics:
+  EXTRN_MDL_NAME_ICS: RRFS
+  FV3GFS_FILE_FMT_ICS: grib2
+  USE_USER_STAGED_EXTRN_FILES: true
+task_get_extrn_lbcs:
+  EXTRN_MDL_NAME_LBCS: RRFS
+  LBC_SPEC_INTVL_HRS: 1
+  FV3GFS_FILE_FMT_LBCS: grib2
+  USE_USER_STAGED_EXTRN_FILES: true
+task_plot_allvars:
+  COMOUT_REF: ""
+rocoto:
+  tasks:
+    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}'
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml
index b00a24ae84..35be12a1ee 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml
@@ -23,3 +23,5 @@ task_get_extrn_lbcs:
   USE_USER_STAGED_EXTRN_FILES: true
   EXTRN_MDL_FILES_LBCS:
     - '{yy}{jjj}{hh}00{fcst_hr:02d}00'
+task_run_fcst:
+  OMP_NUM_THREADS_RUN_FCST: 3
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml
index 44dfec5e75..1265fa8e0c 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUScompact_3km_ics_HRRR_lbcs_RAP_suite_RRFS_v1beta.yaml
@@ -24,3 +24,5 @@ task_get_extrn_lbcs:
   USE_USER_STAGED_EXTRN_FILES: true
   EXTRN_MDL_FILES_LBCS:
     - '{yy}{jjj}{hh}00{fcst_hr:02d}00'
+task_run_fcst:
+  OMP_NUM_THREADS_RUN_FCST: 3
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_NA_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_NA_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml
index 0928efe3de..ee06009cad 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_NA_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_NA_3km_ics_FV3GFS_lbcs_FV3GFS_suite_RRFS_v1beta.yaml
@@ -33,7 +33,7 @@ task_run_prdgen:
   DO_PARALLEL_PRDGEN: true
 rocoto:
   tasks:
-    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/prdgen.yaml"]|include }}'
+    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/prdgen.yaml", "parm/wflow/test.yaml"]|include }}'
     task_make_orog:
       walltime: 01:00:00
     metatask_run_ensemble:
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml
index 1128f2a73c..0d850b0147 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_FV3GFS_lbcs_FV3GFS_suite_WoFS_v0.yaml
@@ -12,14 +12,14 @@ workflow:
   PREDEF_GRID_NAME: SUBCONUS_Ind_3km
   DATE_FIRST_CYCL: '2019061500'
   DATE_LAST_CYCL: '2019061500'
-  FCST_LEN_HRS: 6
+  FCST_LEN_HRS: 12
   PREEXISTING_DIR_METHOD: rename
 rocoto:
   tasks:
-    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml"]|include }}'
+    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/test.yaml"]|include }}'
     metatask_run_ensemble:
       task_run_fcst_mem#mem#:
-        walltime: 01:00:00
+        walltime: 02:00:00
 task_get_extrn_ics:
   EXTRN_MDL_NAME_ICS: FV3GFS
   FV3GFS_FILE_FMT_ICS: grib2
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_RAP_suite_WoFS_v0.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_RAP_suite_WoFS_v0.yaml
index 76ded515af..e8a56e9e1e 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_RAP_suite_WoFS_v0.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_HRRR_lbcs_RAP_suite_WoFS_v0.yaml
@@ -12,6 +12,9 @@ workflow:
   DATE_LAST_CYCL: '2020081000'
   FCST_LEN_HRS: 6
   PREEXISTING_DIR_METHOD: rename
+rocoto:
+  tasks:
+    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/test.yaml"]|include }}'
 task_get_extrn_ics:
   EXTRN_MDL_NAME_ICS: HRRR
   USE_USER_STAGED_EXTRN_FILES: true
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot.yaml
index 6c896151b1..a6dc4ce9b0 100644
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot.yaml
+++ b/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_SUBCONUS_Ind_3km_ics_RAP_lbcs_RAP_suite_RRFS_v1beta_plot.yaml
@@ -14,7 +14,7 @@ workflow:
   PREEXISTING_DIR_METHOD: rename
 rocoto:
   tasks:
-    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml"]|include }}'
+    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/plot.yaml", "parm/wflow/test.yaml"]|include }}'
 task_get_extrn_ics:
   EXTRN_MDL_NAME_ICS: RAP
   EXTRN_MDL_ICS_OFFSET_HRS: 6
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml
deleted file mode 100644
index 0c129be5b6..0000000000
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_13km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v16.yaml
+++ /dev/null
@@ -1,26 +0,0 @@
-metadata:
-  description: |-
-    This test is to ensure that the workflow running in nco mode completes
-    successfully on the RRFS_CONUS_13km grid using the GFS_v16 physics
-    suite with ICs and LBCs derived from the FV3GFS.
-user:
-  RUN_ENVIR: nco
-workflow:
-  CCPP_PHYS_SUITE: FV3_GFS_v16
-  PREDEF_GRID_NAME: RRFS_CONUS_13km
-  DATE_FIRST_CYCL: '2019061500'
-  DATE_LAST_CYCL: '2019061500'
-  FCST_LEN_HRS: 6
-  PREEXISTING_DIR_METHOD: rename
-rocoto:
-  tasks:
-    taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}'
-task_get_extrn_ics:
-  USE_USER_STAGED_EXTRN_FILES: true
-  EXTRN_MDL_NAME_ICS: FV3GFS
-  FV3GFS_FILE_FMT_ICS: grib2
-task_get_extrn_lbcs:
-  USE_USER_STAGED_EXTRN_FILES: true
-  EXTRN_MDL_NAME_LBCS: FV3GFS
-  LBC_SPEC_INTVL_HRS: 3
-  FV3GFS_FILE_FMT_LBCS: grib2
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml
deleted file mode 100644
index 79af5461e3..0000000000
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_25km_ics_FV3GFS_lbcs_FV3GFS_timeoffset_suite_GFS_v16.yaml
+++ /dev/null
@@ -1,26 +0,0 @@
-metadata:
-  description: |-
-    This test is to ensure that the workflow running in nco mode completes
-    successfully on the RRFS_CONUS_25km grid using the FV3_GFS_v16 physics
-    suite with time-offset ICs/LBCs derived from the FV3GFS.
-user:
-  RUN_ENVIR: nco
-workflow:
-  CCPP_PHYS_SUITE: FV3_GFS_v16
-  PREDEF_GRID_NAME: RRFS_CONUS_25km
-  DATE_FIRST_CYCL: '2022081012'
-  DATE_LAST_CYCL: '2022081012'
-  FCST_LEN_HRS: 6
-  PREEXISTING_DIR_METHOD: rename
-rocoto:
-  tasks:
-    taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}'
-task_get_extrn_ics:
-  EXTRN_MDL_NAME_ICS: FV3GFS
-  EXTRN_MDL_ICS_OFFSET_HRS: 6
-  FV3GFS_FILE_FMT_ICS: netcdf
-task_get_extrn_lbcs:
-  EXTRN_MDL_NAME_LBCS: FV3GFS
-  LBC_SPEC_INTVL_HRS: 3
-  EXTRN_MDL_LBCS_OFFSET_HRS: 6
-  FV3GFS_FILE_FMT_LBCS: netcdf
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml
deleted file mode 100644
index f90b17a95e..0000000000
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUS_3km_ics_FV3GFS_lbcs_FV3GFS_suite_GFS_v15_thompson_mynn_lam3km.yaml
+++ /dev/null
@@ -1,28 +0,0 @@
-metadata:
-  description: |-
-    This test is to ensure that the workflow running in nco mode completes
-    successfully on the RRFS_CONUS_3km grid using the GFS_v15_thompson_mynn_lam3km
-    physics suite with ICs and LBCs derived from the FV3GFS.
-user:
-  RUN_ENVIR: nco
-workflow:
-  CCPP_PHYS_SUITE: FV3_GFS_v15_thompson_mynn_lam3km
-  PREDEF_GRID_NAME: RRFS_CONUS_3km
-  DATE_FIRST_CYCL: '2019061500'
-  DATE_LAST_CYCL: '2019061500'
-  FCST_LEN_HRS: 6
-  PREEXISTING_DIR_METHOD: rename
-rocoto:
-  tasks:
-    taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}'
-task_get_extrn_ics:
-  USE_USER_STAGED_EXTRN_FILES: true
-  EXTRN_MDL_NAME_ICS: FV3GFS
-  FV3GFS_FILE_FMT_ICS: grib2
-task_get_extrn_lbcs:
-  USE_USER_STAGED_EXTRN_FILES: true
-  EXTRN_MDL_NAME_LBCS: FV3GFS
-  LBC_SPEC_INTVL_HRS: 3
-  FV3GFS_FILE_FMT_LBCS: grib2
-task_run_fcst:
-  USE_MERRA_CLIMO: true
diff --git a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml b/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml
deleted file mode 100644
index be68e9d45e..0000000000
--- a/tests/WE2E/test_configs/grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
-metadata:
-  description: |-
-    This test is to ensure that the workflow running in nco mode completes
-    successfully on the RRFS_CONUScompact_25km grid using the HRRR physics
-    suite with ICs derived from the HRRR and LBCs derived from the RAP.
-user:
-  RUN_ENVIR: nco
-workflow:
-  CCPP_PHYS_SUITE: FV3_HRRR
-  PREDEF_GRID_NAME: RRFS_CONUScompact_25km
-  DATE_FIRST_CYCL: '2020081000'
-  DATE_LAST_CYCL: '2020081000'
-  FCST_LEN_HRS: 6
-  PREEXISTING_DIR_METHOD: rename
-rocoto:
-  tasks:
-    taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}'
-task_get_extrn_ics:
-  EXTRN_MDL_NAME_ICS: HRRR
-  USE_USER_STAGED_EXTRN_FILES: true
-  EXTRN_MDL_FILES_ICS:
-    - '{yy}{jjj}{hh}00{fcst_hr:02d}00'
-task_get_extrn_lbcs:
-  EXTRN_MDL_NAME_LBCS: RAP
-  LBC_SPEC_INTVL_HRS: 3
-  USE_USER_STAGED_EXTRN_FILES: true
-  EXTRN_MDL_FILES_LBCS:
-    - '{yy}{jjj}{hh}00{fcst_hr:02d}00'
-task_run_fcst:
-  WRITE_DOPOST: true
diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2019_halloween_storm.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2019_halloween_storm.yaml
new file mode 100644
index 0000000000..9784d7bb44
--- /dev/null
+++ b/tests/WE2E/test_configs/ufs_case_studies/config.2019_halloween_storm.yaml
@@ -0,0 +1,38 @@
+metadata:
+  description: |-
+    This test is to ensure that the workflow running in community mode
+    completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16
+    physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms
+    for 2019 Halloween Storm.
+  # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to
+  # download these files, which can delay the WE2E testing process.
+  # To capture the event, extend the FCST_LEN_HRS from 6 to 90.
+user:
+  RUN_ENVIR: community
+platform:
+  EXTRN_MDL_DATA_STORES: aws
+workflow:
+  CCPP_PHYS_SUITE: FV3_GFS_v16
+  PREDEF_GRID_NAME: RRFS_CONUS_13km
+  DATE_FIRST_CYCL: '2019102812'
+  DATE_LAST_CYCL: '2019102812'
+  FCST_LEN_HRS: 6
+  PREEXISTING_DIR_METHOD: rename
+task_get_extrn_ics:
+  EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY
+  FV3GFS_FILE_FMT_ICS: nemsio
+task_get_extrn_lbcs:
+  EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY
+  LBC_SPEC_INTVL_HRS: 3
+  FV3GFS_FILE_FMT_LBCS: nemsio
+rocoto:
+  tasks:
+    task_get_extrn_ics:
+      walltime: 06:00:00
+    task_get_extrn_lbcs:
+      walltime: 06:00:00
+    metatask_run_ensemble:
+      task_make_lbcs_mem#mem#:
+        walltime: 06:00:00
+      task_run_fcst_mem#mem#:
+        walltime: 06:00:00
diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_barry.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_barry.yaml
new file mode 100644
index 0000000000..7e766b6ff9
--- /dev/null
+++ b/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_barry.yaml
@@ -0,0 +1,38 @@
+metadata:
+  description: |-
+    This test is to ensure that the workflow running in community mode
+    completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16
+    physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms
+    for 2019 Hurricane Barry.
+  # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to
+  # download these files, which can delay the WE2E testing process.
+  # To capture the event, extend the FCST_LEN_HRS from 6 to 90.
+user:
+  RUN_ENVIR: community
+platform:
+  EXTRN_MDL_DATA_STORES: aws
+workflow:
+  CCPP_PHYS_SUITE: FV3_GFS_v16
+  PREDEF_GRID_NAME: RRFS_CONUS_13km
+  DATE_FIRST_CYCL: '2019071200'
+  DATE_LAST_CYCL: '2019071200'
+  FCST_LEN_HRS: 6
+  PREEXISTING_DIR_METHOD: rename
+task_get_extrn_ics:
+  EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY
+  FV3GFS_FILE_FMT_ICS: nemsio
+task_get_extrn_lbcs:
+  EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY
+  LBC_SPEC_INTVL_HRS: 3
+  FV3GFS_FILE_FMT_LBCS: nemsio
+rocoto:
+  tasks:
+    task_get_extrn_ics:
+      walltime: 06:00:00
+    task_get_extrn_lbcs:
+      walltime: 06:00:00
+    metatask_run_ensemble:
+      task_make_lbcs_mem#mem#:
+        walltime: 06:00:00
+      task_run_fcst_mem#mem#:
+        walltime: 06:00:00
diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml
new file mode 100644
index 0000000000..557607d810
--- /dev/null
+++ b/tests/WE2E/test_configs/ufs_case_studies/config.2019_hurricane_lorenzo.yaml
@@ -0,0 +1,38 @@
+metadata:
+  description: |-
+    This test is to ensure that the workflow running in community mode
+    completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16
+    physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms
+    for 2019 Hurricane Lorenzo.
+  # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to
+  # download these files, which can delay the WE2E testing process.
+  # To capture the event, extend the FCST_LEN_HRS from 6 to 90.
+user:
+  RUN_ENVIR: community
+platform:
+  EXTRN_MDL_DATA_STORES: aws
+workflow:
+  CCPP_PHYS_SUITE: FV3_GFS_v16
+  PREDEF_GRID_NAME: RRFS_CONUS_13km
+  DATE_FIRST_CYCL: '2019092512'
+  DATE_LAST_CYCL: '2019092512'
+  FCST_LEN_HRS: 6
+  PREEXISTING_DIR_METHOD: rename
+task_get_extrn_ics:
+  EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY
+  FV3GFS_FILE_FMT_ICS: nemsio
+task_get_extrn_lbcs:
+  EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY
+  LBC_SPEC_INTVL_HRS: 3
+  FV3GFS_FILE_FMT_LBCS: nemsio
+rocoto:
+  tasks:
+    task_get_extrn_ics:
+      walltime: 06:00:00
+    task_get_extrn_lbcs:
+      walltime: 06:00:00
+    metatask_run_ensemble:
+      task_make_lbcs_mem#mem#:
+        walltime: 06:00:00
+      task_run_fcst_mem#mem#:
+        walltime: 06:00:00
diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml
new file mode 100644
index 0000000000..fcba9c7924
--- /dev/null
+++ b/tests/WE2E/test_configs/ufs_case_studies/config.2019_memorial_day_heat_wave.yaml
@@ -0,0 +1,36 @@
+metadata:
+  description: |-
+    This test is to ensure that the workflow running in community mode
+    completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16
+    physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms
+    for 2019 Memorial Day Heat Wave.
+  # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to
+  # download these files, which can delay the WE2E testing process.
+  # To capture the event, extend the FCST_LEN_HRS from 6 to 24.
+user:
+  RUN_ENVIR: community
+platform:
+  EXTRN_MDL_DATA_STORES: aws
+workflow:
+  CCPP_PHYS_SUITE: FV3_GFS_v16
+  PREDEF_GRID_NAME: RRFS_CONUS_13km
+  DATE_FIRST_CYCL: '2019052300'
+  DATE_LAST_CYCL: '2019052300'
+  FCST_LEN_HRS: 6
+  PREEXISTING_DIR_METHOD: rename
+task_get_extrn_ics:
+  EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY
+  FV3GFS_FILE_FMT_ICS: nemsio
+task_get_extrn_lbcs:
+  EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY
+  LBC_SPEC_INTVL_HRS: 6
+  FV3GFS_FILE_FMT_LBCS: nemsio
+rocoto:
+  tasks:
+    task_get_extrn_ics:
+      walltime: 06:00:00
+    task_get_extrn_lbcs:
+      walltime: 06:00:00
+    metatask_run_ensemble:
+      task_make_lbcs_mem#mem#:
+        walltime: 06:00:00
diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAD.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAD.yaml
index 71e664e17a..fd5740be5d 100644
--- a/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAD.yaml
+++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAD.yaml
@@ -1,19 +1,12 @@
 metadata:
   description: |-
     This test is to ensure that the workflow running in community mode
-    completes successfully on the RRFS_CONUS_13km grid using the GFS_v16
-    physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms for 2020 Cold Air Damming case.
-    #Users can modify date for other test cases listed in the UFS-CASE-STUDIES platforms
-    #Note for runnning this test on Cheyenne: please modify this config as follows
-    #task_get_extrn_ics:
-    #  EXTRN_MDL_NAME_ICS: FV3GFS
-    #  FV3GFS_FILE_FMT_ICS: nemsio
-    #  USE_USER_STAGED_EXTRN_FILES: true
-    #task_get_extrn_lbcs:
-    #  EXTRN_MDL_NAME_LBCS: FV3GFS
-    #  LBC_SPEC_INTVL_HRS: 3
-    #  FV3GFS_FILE_FMT_LBCS: nemsio
-    #  USE_USER_STAGED_EXTRN_FILES: true
+    completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16
+    physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms
+    for 2020 Cold Air Damming case.
+  # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to
+  # download these files, which can delay the WE2E testing process.
+  # To capture the event, extend the FCST_LEN_HRS from 6 to 90.
 user:
   RUN_ENVIR: community
 platform:
@@ -41,3 +34,5 @@ rocoto:
     metatask_run_ensemble:
       task_make_lbcs_mem#mem#:
         walltime: 06:00:00
+      task_run_fcst_mem#mem#:
+        walltime: 06:00:00
diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAPE.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAPE.yaml
new file mode 100644
index 0000000000..cb765604d6
--- /dev/null
+++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_CAPE.yaml
@@ -0,0 +1,36 @@
+metadata:
+  description: |-
+    This test is to ensure that the workflow running in community mode
+    completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16
+    physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms
+    for 2020 July Convective Available Potential Energy.
+  # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to
+  # download these files, which can delay the WE2E testing process.
+  # To capture the event, extend the FCST_LEN_HRS from 6 to 24.
+user:
+  RUN_ENVIR: community
+platform:
+  EXTRN_MDL_DATA_STORES: aws
+workflow:
+  CCPP_PHYS_SUITE: FV3_GFS_v16
+  PREDEF_GRID_NAME: RRFS_CONUS_13km
+  DATE_FIRST_CYCL: '2020072300'
+  DATE_LAST_CYCL: '2020072300'
+  FCST_LEN_HRS: 6
+  PREEXISTING_DIR_METHOD: rename
+task_get_extrn_ics:
+  EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY
+  FV3GFS_FILE_FMT_ICS: nemsio
+task_get_extrn_lbcs:
+  EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY
+  LBC_SPEC_INTVL_HRS: 3
+  FV3GFS_FILE_FMT_LBCS: nemsio
+rocoto:
+  tasks:
+    task_get_extrn_ics:
+      walltime: 06:00:00
+    task_get_extrn_lbcs:
+      walltime: 06:00:00
+    metatask_run_ensemble:
+      task_make_lbcs_mem#mem#:
+        walltime: 06:00:00
diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml
new file mode 100644
index 0000000000..8bf5ece9ee
--- /dev/null
+++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_denver_radiation_inversion.yaml
@@ -0,0 +1,38 @@
+metadata:
+  description: |-
+    This test is to ensure that the workflow running in community mode
+    completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16
+    physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms
+    for 2020 Denver Radiation Inversion.
+  # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to
+  # download these files, which can delay the WE2E testing process.
+  # To capture the event, extend the FCST_LEN_HRS from 6 to 90.
+user:
+  RUN_ENVIR: community
+platform:
+  EXTRN_MDL_DATA_STORES: aws
+workflow:
+  CCPP_PHYS_SUITE: FV3_GFS_v16
+  PREDEF_GRID_NAME: RRFS_CONUS_13km
+  DATE_FIRST_CYCL: '2020042912'
+  DATE_LAST_CYCL: '2020042912'
+  FCST_LEN_HRS: 6
+  PREEXISTING_DIR_METHOD: rename
+task_get_extrn_ics:
+  EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY
+  FV3GFS_FILE_FMT_ICS: nemsio
+task_get_extrn_lbcs:
+  EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY
+  LBC_SPEC_INTVL_HRS: 3
+  FV3GFS_FILE_FMT_LBCS: nemsio
+rocoto:
+  tasks:
+    task_get_extrn_ics:
+      walltime: 06:00:00
+    task_get_extrn_lbcs:
+      walltime: 06:00:00
+    metatask_run_ensemble:
+      task_make_lbcs_mem#mem#:
+        walltime: 06:00:00
+      task_run_fcst_mem#mem#:
+        walltime: 06:00:00
diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml
new file mode 100644
index 0000000000..3c619c06bb
--- /dev/null
+++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_easter_storm.yaml
@@ -0,0 +1,38 @@
+metadata:
+  description: |-
+    This test is to ensure that the workflow running in community mode
+    completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16
+    physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms
+    for 2020 Easter Sunday Storm.
+  # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to
+  # download these files, which can delay the WE2E testing process.
+  # To capture the event, extend the FCST_LEN_HRS from 6 to 90.
+user:
+  RUN_ENVIR: community
+platform:
+  EXTRN_MDL_DATA_STORES: aws
+workflow:
+  CCPP_PHYS_SUITE: FV3_GFS_v16
+  PREDEF_GRID_NAME: RRFS_CONUS_13km
+  DATE_FIRST_CYCL: '2020040912'
+  DATE_LAST_CYCL: '2020040912'
+  FCST_LEN_HRS: 6
+  PREEXISTING_DIR_METHOD: rename
+task_get_extrn_ics:
+  EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY
+  FV3GFS_FILE_FMT_ICS: nemsio
+task_get_extrn_lbcs:
+  EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY
+  LBC_SPEC_INTVL_HRS: 3
+  FV3GFS_FILE_FMT_LBCS: nemsio
+rocoto:
+  tasks:
+    task_get_extrn_ics:
+      walltime: 06:00:00
+    task_get_extrn_lbcs:
+      walltime: 06:00:00
+    metatask_run_ensemble:
+      task_make_lbcs_mem#mem#:
+        walltime: 06:00:00
+      task_run_fcst_mem#mem#:
+        walltime: 06:00:00
diff --git a/tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml b/tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml
new file mode 100644
index 0000000000..6121228cb8
--- /dev/null
+++ b/tests/WE2E/test_configs/ufs_case_studies/config.2020_jan_cold_blast.yaml
@@ -0,0 +1,38 @@
+metadata:
+  description: |-
+    This test is to ensure that the workflow running in community mode
+    completes successfully on the RRFS_CONUS_13km grid using the FV3_GFS_v16
+    physics suite with ICs and LBCs derived from the UFS-CASE-STUDIES platforms
+    for 2020 January Cold Blast.
+  # NOTE: FCST_LEN_HRS is set to 6 because it can take a very long time to
+  # download these files, which can delay the WE2E testing process.
+  # To capture the event, extend the FCST_LEN_HRS from 6 to 90.
+user:
+  RUN_ENVIR: community
+platform:
+  EXTRN_MDL_DATA_STORES: aws
+workflow:
+  CCPP_PHYS_SUITE: FV3_GFS_v16
+  PREDEF_GRID_NAME: RRFS_CONUS_13km
+  DATE_FIRST_CYCL: '2020011812'
+  DATE_LAST_CYCL: '2020011812'
+  FCST_LEN_HRS: 6
+  PREEXISTING_DIR_METHOD: rename
+task_get_extrn_ics:
+  EXTRN_MDL_NAME_ICS: UFS-CASE-STUDY
+  FV3GFS_FILE_FMT_ICS: nemsio
+task_get_extrn_lbcs:
+  EXTRN_MDL_NAME_LBCS: UFS-CASE-STUDY
+  LBC_SPEC_INTVL_HRS: 3
+  FV3GFS_FILE_FMT_LBCS: nemsio
+rocoto:
+  tasks:
+    task_get_extrn_ics:
+      walltime: 06:00:00
+    task_get_extrn_lbcs:
+      walltime: 06:00:00
+    metatask_run_ensemble:
+      task_make_lbcs_mem#mem#:
+        walltime: 06:00:00
+      task_run_fcst_mem#mem#:
+        walltime: 06:00:00
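Each case-study config above deliberately caps FCST_LEN_HRS at 6 so slow input downloads do not hold up the WE2E suite; per the NOTE lines, reproducing the full event means raising that value (to 90, or to 24 for the heat-wave and CAPE cases). A hypothetical helper sketching one way to stage a full-length copy of a case (the copy's file name is illustrative, and `sed -i` assumes GNU sed):

    #!/usr/bin/env bash
    # Copy a case-study config and extend the forecast length from the
    # 6-h smoke-test default to the full 90-h event window.
    src=tests/WE2E/test_configs/ufs_case_studies/config.2019_halloween_storm.yaml
    cp "${src}" config.2019_halloween_storm_full.yaml
    sed -i 's/^  FCST_LEN_HRS: 6$/  FCST_LEN_HRS: 90/' config.2019_halloween_storm_full.yaml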
diff --git a/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_winter_wx.yaml b/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_winter_wx.yaml
index dd3de4e472..85a515f293 100644
--- a/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_winter_wx.yaml
+++ b/tests/WE2E/test_configs/verification/config.MET_ensemble_verification_winter_wx.yaml
@@ -14,7 +14,7 @@ workflow:
   PREEXISTING_DIR_METHOD: rename
 rocoto:
   tasks:
-    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml"]|include }}'
+    taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/verify_pre.yaml", "parm/wflow/verify_det.yaml", "parm/wflow/verify_ens.yaml", "parm/wflow/test.yaml"]|include }}'
     metatask_run_ensemble:
      task_run_fcst_mem#mem#:
        walltime: 01:00:00
diff --git a/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.yaml b/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.yaml
index 53d10f002a..91114f5b00 100644
--- a/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.yaml
+++ b/tests/WE2E/test_configs/wflow_features/config.deactivate_tasks.yaml
@@ -31,3 +31,4 @@ rocoto:
     task_get_extrn_lbcs:
     metatask_run_ensemble:
     metatask_run_ens_post:
+    metatask_integration_test:
diff --git a/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml b/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml
deleted file mode 100644
index 739b6bb3c5..0000000000
--- a/tests/WE2E/test_configs/wflow_features/config.nco_ensemble.yaml
+++ /dev/null
@@ -1,34 +0,0 @@
-metadata:
-  description: |-
-    This test checks the capability of the workflow to run ensemble forecasts
-    (i.e. DO_ENSEMBLE set to "TRUE") in nco mode (i.e. RUN_ENVIR set to
-    "nco") with the number of ensemble members (NUM_ENS_MEMBERS) set to
-    "2".  The lack of leading zeros in this "2" should cause the ensemble
-    members to be named "mem1" and "mem2" (instead of, for instance, "mem01"
-    and "mem02").
-    Note also that this test uses two cycle hours ("12" and "18") to test
-    the capability of the workflow to run ensemble forecasts for more than
-    one cycle hour in nco mode.
-user:
-  RUN_ENVIR: nco
-workflow:
-  CCPP_PHYS_SUITE: FV3_GFS_v15p2
-  PREDEF_GRID_NAME: RRFS_CONUS_25km
-  DATE_FIRST_CYCL: '2019070100'
-  DATE_LAST_CYCL: '2019070212'
-  INCR_CYCL_FREQ: 12
-  FCST_LEN_HRS: 6
-  PREEXISTING_DIR_METHOD: rename
-task_get_extrn_ics:
-  EXTRN_MDL_NAME_ICS: FV3GFS
-  USE_USER_STAGED_EXTRN_FILES: true
-task_get_extrn_lbcs:
-  EXTRN_MDL_NAME_LBCS: FV3GFS
-  LBC_SPEC_INTVL_HRS: 3
-  USE_USER_STAGED_EXTRN_FILES: true
-global:
-  DO_ENSEMBLE: true
-  NUM_ENS_MEMBERS: 2
-rocoto:
-  tasks:
-    taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}'
diff --git a/tests/WE2E/test_configs/wflow_features/config.nco_inline_post.yaml b/tests/WE2E/test_configs/wflow_features/config.nco_inline_post.yaml
deleted file mode 120000
index 6ec59fe0dd..0000000000
--- a/tests/WE2E/test_configs/wflow_features/config.nco_inline_post.yaml
+++ /dev/null
@@ -1 +0,0 @@
-../grids_extrn_mdls_suites_nco/config.nco_grid_RRFS_CONUScompact_25km_ics_HRRR_lbcs_RAP_suite_HRRR.yaml
\ No newline at end of file
diff --git a/tests/WE2E/test_configs/wflow_features/config.specify_template_filenames.yaml b/tests/WE2E/test_configs/wflow_features/config.specify_template_filenames.yaml
index 2c39bc388e..996ea2e7d5 100644
--- a/tests/WE2E/test_configs/wflow_features/config.specify_template_filenames.yaml
+++ b/tests/WE2E/test_configs/wflow_features/config.specify_template_filenames.yaml
@@ -9,7 +9,7 @@ workflow:
   DIAG_TABLE_TMPL_FN: diag_table.FV3_GFS_v15p2
   FIELD_TABLE_TMPL_FN: field_table.FV3_GFS_v15p2
   MODEL_CONFIG_FN: model_configure
-  NEMS_CONFIG_FN: nems.configure
+  UFS_CONFIG_FN: ufs.configure
   CCPP_PHYS_SUITE: FV3_GFS_v15p2
   PREDEF_GRID_NAME: RRFS_CONUS_25km
   DATE_FIRST_CYCL: '2019070100'
diff --git a/tests/WE2E/utils.py b/tests/WE2E/utils.py
index fb96dab004..0e6629ad17 100755
--- a/tests/WE2E/utils.py
+++ b/tests/WE2E/utils.py
@@ -21,7 +21,7 @@
     cfg_to_yaml_str,
     flatten_dict,
     load_config_file,
-    load_shell_config
+    load_yaml_config
 )

 REPORT_WIDTH = 100
@@ -154,13 +154,13 @@ def calculate_core_hours(expts_dict: dict) -> dict:

     for expt in expts_dict:
         # Read variable definitions file
-        vardefs_file = os.path.join(expts_dict[expt]["expt_dir"],"var_defns.sh")
+        vardefs_file = os.path.join(expts_dict[expt]["expt_dir"],"var_defns.yaml")
         if not os.path.isfile(vardefs_file):
             logging.warning(f"\nWARNING: For experiment {expt}, variable definitions file")
             logging.warning(f"{vardefs_file}\ndoes not exist!\n\nDropping experiment from summary")
             continue
         logging.debug(f'Reading variable definitions file {vardefs_file}')
-        vardefs = load_shell_config(vardefs_file)
+        vardefs = load_yaml_config(vardefs_file)
         vdf = flatten_dict(vardefs)
         cores_per_node = vdf["NCORES_PER_NODE"]
         for task in expts_dict[expt]:
@@ -530,7 +530,7 @@ def compare_rocotostat(expt_dict,name):
             continue
         line_array = line.split()
         # Skip header lines
-        if line_array[0] == 'CYCLE':
+        if line_array[0] == 'CYCLE' or line_array[0] == '/apps/rocoto/1.3.3/lib/workflowmgr/launchserver.rb:40:':
             continue
         # We should now just have lines describing jobs, in the form:
         # line_array = ['cycle','task','jobid','status','exit status','num tries','walltime']
diff --git a/tests/build.sh b/tests/build.sh
index caf0e2b0ae..f230354a61 100755
--- a/tests/build.sh
+++ b/tests/build.sh
@@ -21,7 +21,7 @@ function usage() {
   exit 1
 }

-machines=( hera jet cheyenne derecho orion hercules wcoss2 gaea-c5 odin singularity macos noaacloud )
+machines=( hera jet cheyenne derecho orion hercules wcoss2 gaea odin singularity macos noaacloud )

 [[ $# -gt 4 ]] && usage
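Both setup_WE2E_tests.sh and tests/build.sh validate the requested platform against the same machines array, now listing gaea in place of gaea-c5. A standalone sketch of that membership check (the loop and messages are illustrative, not copied from build.sh):

    #!/usr/bin/env bash
    # Validate a machine name against the updated supported-platform list.
    machines=( hera jet cheyenne derecho orion hercules wcoss2 gaea odin singularity macos noaacloud )
    machine=${1:-gaea}
    ok=false
    for m in "${machines[@]}"; do
      [[ ${m} == "${machine,,}" ]] && ok=true
    done
    ${ok} || { echo "unsupported machine: ${machine}" >&2; exit 1; }
    echo "machine ${machine,,} is supported"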
diff --git a/tests/test_python/test_create_model_configure_file.py b/tests/test_python/test_create_model_configure_file.py
index 9475028505..d5aea79ed8 100644
--- a/tests/test_python/test_create_model_configure_file.py
+++ b/tests/test_python/test_create_model_configure_file.py
@@ -43,9 +43,11 @@ def setUp(self):
         set_env_var("USHdir", USHdir)
         set_env_var("MODEL_CONFIG_FN", MODEL_CONFIG_FN)
         set_env_var("MODEL_CONFIG_TMPL_FP", MODEL_CONFIG_TMPL_FP)
+        set_env_var("PE_MEMBER01", 24)
         set_env_var("FCST_LEN_HRS", 72)
         set_env_var("FHROT", 0)
         set_env_var("DT_ATMOS", 1)
+        set_env_var("OMP_NUM_THREADS_RUN_FCST", 1)
         set_env_var("RESTART_INTERVAL", 4)

         set_env_var("ITASKS", 1)
diff --git a/tests/test_python/test_generate_FV3LAM_wflow.py b/tests/test_python/test_generate_FV3LAM_wflow.py
index 9e9e9f5274..48029d21b6 100644
--- a/tests/test_python/test_generate_FV3LAM_wflow.py
+++ b/tests/test_python/test_generate_FV3LAM_wflow.py
@@ -8,12 +8,9 @@
 from multiprocessing import Process

 from python_utils import (
-    load_config_file,
-    update_dict,
     cp_vrfy,
     run_command,
     define_macos_utilities,
-    cfg_to_yaml_str,
     set_env_var,
     get_env_var,
 )
@@ -24,7 +21,7 @@ class Testing(unittest.TestCase):
     """ Class to run the tests. """

     def test_generate_FV3LAM_wflow(self):
-        """ Test that a community and nco sample config can successfully
+        """ Test that a sample config can successfully
         lead to the creation of an experiment directory. No jobs are
         submitted.
         """
@@ -49,30 +46,6 @@ def run_workflow(USHdir, logfile):
         )
         run_workflow(USHdir, logfile)

-        # nco test case
-        nco_test_config = load_config_file(f"{USHdir}/config.nco.yaml")
-        # Since we don't have a pre-gen grid dir on a generic linux
-        # platform, turn the make_* tasks on for this test.
-        cfg_updates = {
-            "user": {
-                "MACHINE": "linux",
-            },
-            "rocoto": {
-                "tasks": {
-                    "taskgroups": \
-                        """'{{ ["parm/wflow/prep.yaml",
-                        "parm/wflow/coldstart.yaml",
-                        "parm/wflow/post.yaml"]|include }}'"""
-                },
-            },
-        }
-        update_dict(cfg_updates, nco_test_config)
-
-        with open(f"{USHdir}/config.yaml", "w", encoding="utf-8") as cfg_file:
-            cfg_file.write(cfg_to_yaml_str(nco_test_config))
-
-        run_workflow(USHdir, logfile)
-
     def setUp(self):
         define_macos_utilities()
         set_env_var("DEBUG", False)
diff --git a/tests/test_python/test_retrieve_data.py b/tests/test_python/test_retrieve_data.py
index 1d54e0904c..2c749c97ac 100644
--- a/tests/test_python/test_retrieve_data.py
+++ b/tests/test_python/test_retrieve_data.py
@@ -493,61 +493,3 @@ def test_ufs_lbcs_from_aws(self):

         # Testing that there is no failure
         retrieve_data.main(args)
-
-    @unittest.skipIf(os.environ.get("CI") == "true", "Skipping HPSS tests")
-    def test_rap_obs_from_hpss(self):
-
-        """Get RAP observations from hpss for a 06z time"""
-
-        with tempfile.TemporaryDirectory(dir=self.path) as tmp_dir:
-            os.chdir(tmp_dir)
-
-            # fmt: off
-            args = [
-                '--file_set', 'obs',
-                '--config', self.config,
-                '--cycle_date', '2023032106',
-                '--data_stores', 'hpss',
-                '--data_type', 'RAP_obs',
-                '--output_path', tmp_dir,
-                '--debug',
-            ]
-            # fmt: on
-
-            retrieve_data.main(args)
-
-            # Verify files exist in temp dir
-
-            path = os.path.join(tmp_dir, "*")
-            files_on_disk = glob.glob(path)
-            self.assertEqual(len(files_on_disk), 30)
-
-    @unittest.skipIf(os.environ.get("CI") == "true", "Skipping HPSS tests")
-    def test_rap_e_obs_from_hpss(self):
-
-        """Get RAP observations from hpss for a 12z time;
-        at 00z and 12z we expect to see additional files
-        with the 'rap_e' naming convention"""
-
-        with tempfile.TemporaryDirectory(dir=self.path) as tmp_dir:
-            os.chdir(tmp_dir)
-
-            # fmt: off
-            args = [
-                '--file_set', 'obs',
-                '--config', self.config,
-                '--cycle_date', '2023032112',
-                '--data_stores', 'hpss',
-                '--data_type', 'RAP_obs',
-                '--output_path', tmp_dir,
-                '--debug',
-            ]
-            # fmt: on
-
-            retrieve_data.main(args)
-
-            # Verify files exist in temp dir
-
-            path = os.path.join(tmp_dir, "*")
-            files_on_disk = glob.glob(path)
-            self.assertEqual(len(files_on_disk), 37)
diff --git a/tests/test_python/test_set_FV3nml_ens_stoch_seeds.py b/tests/test_python/test_set_fv3nml_ens_stoch_seeds.py
similarity index 68%
rename from tests/test_python/test_set_FV3nml_ens_stoch_seeds.py
rename to tests/test_python/test_set_fv3nml_ens_stoch_seeds.py
index f87d57d53b..17bf74c04b 100644
--- a/tests/test_python/test_set_FV3nml_ens_stoch_seeds.py
+++ b/tests/test_python/test_set_fv3nml_ens_stoch_seeds.py
@@ -1,4 +1,4 @@
-""" Tests for set_FV3nml_ens_stoch_seeds.py """
+""" Tests for set_fv3nml_ens_stoch_seeds.py """

 #pylint: disable=invalid-name

@@ -15,18 +15,17 @@
     set_env_var,
 )

-from set_FV3nml_ens_stoch_seeds import set_FV3nml_ens_stoch_seeds
+from set_fv3nml_ens_stoch_seeds import set_fv3nml_ens_stoch_seeds

 class Testing(unittest.TestCase):
     """ Define the tests """

-    def test_set_FV3nml_ens_stoch_seeds(self):
+    def test_set_fv3nml_ens_stoch_seeds(self):
         """ Call the function and make sure it doesn't fail"""
         os.chdir(self.mem_dir)
-        set_FV3nml_ens_stoch_seeds(cdate=self.cdate)
+        set_fv3nml_ens_stoch_seeds(cdate=self.cdate, expt_config=self.config)

     def setUp(self):
         define_macos_utilities()
-        set_env_var("DEBUG", True)
         set_env_var("VERBOSE", True)
         self.cdate = datetime(2021, 1, 1)
         test_dir = os.path.dirname(os.path.abspath(__file__))
@@ -55,17 +54,22 @@ def setUp(self):
         )

-        set_env_var("USHdir", USHdir)
         set_env_var("ENSMEM_INDX", 2)
-        set_env_var("FV3_NML_FN", "input.nml")
-        set_env_var("FV3_NML_FP", os.path.join(self.mem_dir, "input.nml"))
-        set_env_var("DO_SHUM", True)
-        set_env_var("DO_SKEB", True)
-        set_env_var("DO_SPPT", True)
-        set_env_var("DO_SPP", True)
-        set_env_var("DO_LSM_SPP", True)
-        ISEED_SPP = [4, 5, 6, 7, 8]
-        set_env_var("ISEED_SPP", ISEED_SPP)
+
+        self.config = {
+            "workflow": {
+                "VERBOSE": True,
+                "FV3_NML_FN": "input.nml",
+            },
+            "global": {
+                "DO_SHUM": True,
+                "DO_SKEB": True,
+                "DO_SPPT": True,
+                "DO_SPP": True,
+                "DO_LSM_SPP": True,
+                "ISEED_SPP": [4, 5, 6, 7, 8],
+            },
+        }

     def tearDown(self):
         self.tmp_dir.cleanup()
diff --git a/tests/test_python/test_set_FV3nml_sfc_climo_filenames.py b/tests/test_python/test_set_fv3nml_sfc_climo_filenames.py
similarity index 69%
rename from tests/test_python/test_set_FV3nml_sfc_climo_filenames.py
rename to tests/test_python/test_set_fv3nml_sfc_climo_filenames.py
index 131af70506..b0daf50fea 100644
--- a/tests/test_python/test_set_FV3nml_sfc_climo_filenames.py
+++ b/tests/test_python/test_set_fv3nml_sfc_climo_filenames.py
@@ -1,4 +1,4 @@
-""" Tests for set_FV3nml_sfc_climo_filenames.py """
+""" Tests for set_fv3nml_sfc_climo_filenames.py """

 #pylint: disable=invalid-name

@@ -12,13 +12,13 @@
     mkdir_vrfy,
     set_env_var,
 )
-from set_FV3nml_sfc_climo_filenames import set_FV3nml_sfc_climo_filenames
+from set_fv3nml_sfc_climo_filenames import set_fv3nml_sfc_climo_filenames

 class Testing(unittest.TestCase):
     """ Define the tests """

-    def test_set_FV3nml_sfc_climo_filenames(self):
+    def test_set_fv3nml_sfc_climo_filenames(self):
         """ Call the function and don't raise an Exception.
         """
-        set_FV3nml_sfc_climo_filenames()
+        set_fv3nml_sfc_climo_filenames(config=self.config)

     def setUp(self):
         define_macos_utilities()
@@ -42,13 +42,15 @@ def setUp(self):
             os.path.join(PARMdir, "input.nml.FV3"),
             os.path.join(EXPTDIR, "input.nml"),
         )
-        set_env_var("PARMdir", PARMdir)
-        set_env_var("EXPTDIR", EXPTDIR)
-        set_env_var("FIXlam", FIXlam)
-        set_env_var("DO_ENSEMBLE", False)
-        set_env_var("CRES", "C3357")
-        set_env_var("RUN_ENVIR", "nco")
-        set_env_var("FV3_NML_FP", os.path.join(EXPTDIR, "input.nml"))
+        self.config = {
+            "CRES": "C3357",
+            "DO_ENSEMBLE": False,
+            "EXPTDIR": EXPTDIR,
+            "FIXlam": FIXlam,
+            "FV3_NML_FP": os.path.join(EXPTDIR, "input.nml"),
+            "PARMdir": PARMdir,
+            "RUN_ENVIR": "nco",
+        }

     def tearDown(self):
         self.tmp_dir.cleanup()
diff --git a/tests/test_python/test_set_ozone_param.py b/tests/test_python/test_set_ozone_param.py
deleted file mode 100644
index 1d0e6d6aa7..0000000000
--- a/tests/test_python/test_set_ozone_param.py
+++ /dev/null
@@ -1,45 +0,0 @@
-""" Tests for set_ozone_param.py """
-
-#pylint: disable=invalid-name
-
-import os
-import unittest
-
-from set_ozone_param import set_ozone_param
-
-class Testing(unittest.TestCase):
-    """ Define the tests """
-    def test_set_ozone_param(self):
-        """ Test that when the CCPP phyiscs suite XML is provided that
-        activates ozone, the expected ozone parameter is returned"""
-        test_dir = os.path.dirname(os.path.abspath(__file__))
-        USHdir = os.path.join(test_dir, "..", "..", "ush")
-        ozone_param, _, _ = set_ozone_param(
-            os.path.join(USHdir, "test_data", "suite_FV3_GSD_SAR.xml"),
-            self.CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING,
-        )
-        self.assertEqual("ozphys_2015", ozone_param)
-
-    def setUp(self):
-        self.CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = [
-            "aerosol.dat | global_climaeropac_global.txt",
-            "co2historicaldata_2010.txt | fix_co2_proj/global_co2historicaldata_2010.txt",
-            "co2historicaldata_2011.txt | fix_co2_proj/global_co2historicaldata_2011.txt",
-            "co2historicaldata_2012.txt | fix_co2_proj/global_co2historicaldata_2012.txt",
-            "co2historicaldata_2013.txt | fix_co2_proj/global_co2historicaldata_2013.txt",
-            "co2historicaldata_2014.txt | fix_co2_proj/global_co2historicaldata_2014.txt",
-            "co2historicaldata_2015.txt | fix_co2_proj/global_co2historicaldata_2015.txt",
-            "co2historicaldata_2016.txt | fix_co2_proj/global_co2historicaldata_2016.txt",
-            "co2historicaldata_2017.txt | fix_co2_proj/global_co2historicaldata_2017.txt",
-            "co2historicaldata_2018.txt | fix_co2_proj/global_co2historicaldata_2018.txt",
-            "co2historicaldata_2019.txt | fix_co2_proj/global_co2historicaldata_2019.txt",
-            "co2historicaldata_2020.txt | fix_co2_proj/global_co2historicaldata_2020.txt",
-            "co2historicaldata_2021.txt | fix_co2_proj/global_co2historicaldata_2021.txt",
-            "co2historicaldata_glob.txt | global_co2historicaldata_glob.txt",
-            "co2monthlycyc.txt | co2monthlycyc.txt",
-            "global_h2oprdlos.f77 | global_h2o_pltc.f77",
-            "global_zorclim.1x1.grb | global_zorclim.1x1.grb",
-            "sfc_emissivity_idx.txt | global_sfc_emissivity_idx.txt",
-            "solarconstant_noaa_an.txt | global_solarconstant_noaa_an.txt",
-            "global_o3prdlos.f77 | ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77",
-        ]
diff --git a/ush/bash_utils/check_for_preexist_dir_file.sh b/ush/bash_utils/check_for_preexist_dir_file.sh
index 4ca55766d2..2843222230 100644
--- a/ush/bash_utils/check_for_preexist_dir_file.sh
+++ b/ush/bash_utils/check_for_preexist_dir_file.sh
@@ -107,7 +107,7 @@ where the arguments are defined as follows:
 #
   "delete")

-      rm_vrfy -rf "${dir_or_file}"
+      rm -rf "${dir_or_file}"
      ;;
 #
 #-----------------------------------------------------------------------
@@ -134,7 +134,7 @@ Specified directory or file (dir_or_file) already exists:
 Moving (renaming) preexisting directory or file to:
   old_dir_or_file = \"${old_dir_or_file}\""

-      mv_vrfy "${dir_or_file}" "${old_dir_or_file}"
+      mv "${dir_or_file}" "${old_dir_or_file}"
      ;;
 #
 #-----------------------------------------------------------------------
diff --git a/ush/bash_utils/check_var_valid_value.sh b/ush/bash_utils/check_var_valid_value.sh
index 21288184db..5b942c1f73 100644
--- a/ush/bash_utils/check_var_valid_value.sh
+++ b/ush/bash_utils/check_var_valid_value.sh
@@ -96,7 +96,7 @@ where the arguments are defined as follows:

   var_value=${!var_name}
   valid_var_values_at="$valid_var_values_array_name[@]"
-  valid_var_values=("${!valid_var_values_at}")
+  valid_var_values=("${!valid_var_values_at:-}")

   if [ "$#" -eq 3 ]; then
     err_msg="$3"
diff --git a/ush/bash_utils/create_symlink_to_file.sh b/ush/bash_utils/create_symlink_to_file.sh
index 38099ffb8e..0cfcdc9fdf 100644
--- a/ush/bash_utils/create_symlink_to_file.sh
+++ b/ush/bash_utils/create_symlink_to_file.sh
@@ -16,78 +16,21 @@ function create_symlink_to_file() {
 #
 #-----------------------------------------------------------------------
 #
-# Save current shell options (in a global array). Then set new options
-# for this script/function.
-#
-#-----------------------------------------------------------------------
-#
-  { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1
-#
-#-----------------------------------------------------------------------
-#
-# Get the full path to the file in which this script/function is located
-# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
-# which the file is located (scrfunc_dir).
-#
-#-----------------------------------------------------------------------
-#
-  local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" )
-  local scrfunc_fn=$( basename "${scrfunc_fp}" )
-  local scrfunc_dir=$( dirname "${scrfunc_fp}" )
-#
-#-----------------------------------------------------------------------
-#
-# Get the name of this function.
-#
-#-----------------------------------------------------------------------
-#
-  local func_name="${FUNCNAME[0]}"
-#
-#-----------------------------------------------------------------------
-#
 # Specify the set of valid argument names for this script/function. Then
 # process the arguments provided to this script/function (which should
 # consist of a set of name-value pairs of the form arg1="value1", etc).
 #
 #-----------------------------------------------------------------------
 #
-  local valid_args=( \
-"target" \
-"symlink" \
-"relative" \
-  )
-  process_args valid_args "$@"
-#
-#-----------------------------------------------------------------------
-#
-# For debugging purposes, print out values of arguments passed to this
-# script. Note that these will be printed out only if VERBOSE is set to
-# TRUE.
-#
-#-----------------------------------------------------------------------
-#
-  print_input_args valid_args
-#
-#-----------------------------------------------------------------------
-#
-# Verify that the required arguments to this function have been specified.
-# If not, print out an error message and exit.
-#
-#-----------------------------------------------------------------------
-#
-  if [ -z "${target}" ]; then
-    print_err_msg_exit "\
-The argument \"target\" specifying the target of the symbolic link that
-this function will create was not specified in the call to this function:
-  target = \"$target\""
-  fi
+if [[ $# -lt 2 ]]; then
+  usage
+  print_err_msg_exit "Function create_symlink_to_file() requires at least two arguments"
+fi

-  if [ -z "${symlink}" ]; then
-    print_err_msg_exit "\
-The argument \"symlink\" specifying the symbolic link that this function
-will create was not specified in the call to this function:
-  symlink = \"$symlink\""
-  fi
+target=$1
+symlink=$2
+relative=${3:-TRUE}
+relative=$(boolify $relative)
 #
 #-----------------------------------------------------------------------
 #
@@ -106,8 +49,6 @@ will create was not specified in the call to this function:
 #
 #-----------------------------------------------------------------------
 #
-  relative=${relative:-"TRUE"}
-
   valid_vals_relative=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no")
   check_var_valid_value "relative" "valid_vals_relative"
 #
@@ -140,24 +81,9 @@ not exist or is not a file:
 #
 # Create the symlink.
 #
-# Important note:
-# In the ln_vrfy command below, do not quote ${relative_flag} because if
-# is quoted (either single or double quotes) but happens to be a null
-# string, it will be treated as the (empty) name of (or path to) the
-# target and will cause an error.
-#
-#-----------------------------------------------------------------------
-#
-  ln_vrfy -sf ${relative_flag} "$target" "$symlink"
-#
-#-----------------------------------------------------------------------
-#
-# Restore the shell options saved at the beginning of this script/func-
-# tion.
-#
 #-----------------------------------------------------------------------
 #
-  { restore_shell_opts; } > /dev/null 2>&1
+ln -sf ${relative_flag} "$target" "$symlink"

 }
diff --git a/ush/bash_utils/eval_METplus_timestr_tmpl.sh b/ush/bash_utils/eval_METplus_timestr_tmpl.sh
index 245369509b..572f7c68c4 100644
--- a/ush/bash_utils/eval_METplus_timestr_tmpl.sh
+++ b/ush/bash_utils/eval_METplus_timestr_tmpl.sh
@@ -163,9 +163,23 @@ cannot be empty:
 #-----------------------------------------------------------------------
 #
   case "${METplus_time_fmt}" in
-    "%Y%m%d%H"|"%Y%m%d"|"%H%M%S"|"%H")
+    "%Y%m%d%H"|"%Y%m%d"|"%H%M%S")
       fmt="${METplus_time_fmt}"
       ;;
+    "%H")
+#
+# The "%H" format needs to be treated differently depending on if it's
+# formatting a "lead" time type or another (e.g. "init" or "vald") because
+# for "lead", the printf function is used below (which doesn't understand
+# the "%H" format) whereas for the others, the date utility is used (which
+# does understand "%H").
+#
+      if [ "${METplus_time_type}" = "lead" ]; then
+        fmt="%02.0f"
+      else
+        fmt="${METplus_time_fmt}"
+      fi
+      ;;
     "%HHH")
 #
 # Print format assumes that the argument to printf (i.e. the number to
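The new "%H" branch exists because lead times are rendered with printf, which has no %H conversion, while init/valid times go through the date utility, which does. A standalone illustration of the two paths (GNU date assumed for the --date form):

    #!/usr/bin/env bash
    # "lead" path: a number of hours, zero-padded by printf's float format,
    # matching the fmt="%02.0f" branch above.
    lead_hrs=9
    printf "%02.0f\n" "${lead_hrs}"          # prints: 09

    # "init"/"valid" path: a clock time, formatted by date's %H.
    date --date="2019-06-15 09:00" "+%H"     # prints: 09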
The first argument to this -# function is the command to execute while the remaining ones are the -# options/arguments to be passed to that command. -# -#----------------------------------------------------------------------- -# -function filesys_cmd_vrfy() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Get information about the script or function that calls this function. -# Note that caller_name will be set as follows: -# -# 1) If the caller is a function, caller_name will be set to the name of -# that function. -# 2) If the caller is a sourced script, caller_name will be set to -# "script". Note that a sourced script cannot be the top level -# script since by defintion, it is sourced by another script or -# function. -# 3) If the caller is the top-level script, caller_name will be set to -# "main". -# -# Thus, if caller_name is set to "script" or "main", the caller is a -# script, and if it is set to anything else, the caller is a function. -# -# Below, the index into FUNCNAME and BASH_SOURCE is 2 (not 1 as is usually -# the case) because this function is called by functions such as cp_vrfy, -# mv_vrfy, rm_vrfy, ln_vrfy, mkdir_vrfy, and cd_vrfy, but these are just -# wrappers, and in the error and informational messages, we are really -# interested in the scripts/functions that in turn call these wrappers. -# -#----------------------------------------------------------------------- -# - local caller_name="main" - local caller_fp="" - if [ -z "${BASH_SOURCE[2]-x}" ]; then - caller_fp=$( $READLINK -f "${BASH_SOURCE[2]}" ) - local caller_fn=$( basename "${caller_fp}" ) - local caller_dir=$( dirname "${caller_fp}" ) - caller_name="${FUNCNAME[2]}" - fi -# -#----------------------------------------------------------------------- -# -# Declare local variables that are used later below. -# -#----------------------------------------------------------------------- -# - local cmd \ - output \ - exit_code \ - double_space \ - script_or_function -# -#----------------------------------------------------------------------- -# -# Check that at least one argument is supplied. 
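The caller-introspection trick the deleted comments above describe is standard bash; a compact sketch of it (function names hypothetical):

    report_caller() {
      # FUNCNAME[0] is this function and index 1 its direct caller; the
      # deleted wrapper read index 2 to look past thin shims like cp_vrfy.
      echo "called from ${FUNCNAME[1]:-main} (${BASH_SOURCE[1]:-n/a})"
    }
    outer() { report_caller; }
    outer   # -> called from outer (plus the path of the sourcing script)
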
-# -#----------------------------------------------------------------------- -# - if [ "$#" -lt 1 ]; then - - print_err_msg_exit " -Incorrect number of arguments specified: - - Function name: \"${func_name}\" - Number of arguments specified: $# - -Usage: - - ${func_name} cmd [args_to_cmd] - -where \"cmd\" is the name of the command to execute and \"args_to_cmd\" -are zero or more options and arguments to pass to that command. -" - - fi -# -#----------------------------------------------------------------------- -# -# The first argument to this function is the command to execute while -# the remaining ones are the arguments to that command. Extract the -# command and save it in the variable "cmd". Then shift the argument -# list so that $@ contains the arguments to the command but not the -# name of the command itself. -# -#----------------------------------------------------------------------- -# - cmd="$1" - shift -# -#----------------------------------------------------------------------- -# -# Pass the arguments to the command and execute it, saving the outputs -# to stdout and stderr in the variable "output". Also, save the exit -# code from the execution. -# -#----------------------------------------------------------------------- -# - local output=$( "$cmd" "$@" 2>&1 ) - local exit_code=$? -# -#----------------------------------------------------------------------- -# -# If output is not empty, it will be printed to stdout below either as -# an error message or an informational message. In either case, format -# it by adding a double space to the beginning of each line. -# -#----------------------------------------------------------------------- -# - if [ -n "$output" ]; then - local double_space=" " - output="${double_space}${output}" - output=${output/$'\n'/$'\n'${double_space}} - fi -# -#----------------------------------------------------------------------- -# -# If the exit code from the execution of cmd above is nonzero, print out -# an error message and exit. -# -#----------------------------------------------------------------------- -# - if [ "${caller_name}" = "main" ] || \ - [ "${caller_name}" = "script" ]; then - local script_or_function="the script" - else - local script_or_function="function \"${caller_name}\"" - fi - - if [ ${exit_code} -ne 0 ]; then - - print_err_msg_exit "\ -Call to function \"${cmd}_vrfy\" failed. This function was called from -${script_or_function} in file: - - \"${caller_fp}\" - -Error message from \"${cmd}_vrfy\" function's \"$cmd\" operation: -$output" - - fi -# -#----------------------------------------------------------------------- -# -# If the exit code from the execution of cmd above is zero, continue. -# -# First, check if cmd is set to "cd". If so, the execution of cmd above -# in a separate subshell [which is what happens when using the $("$cmd") -# construct above] will change directory in that subshell but not in the -# current shell. Thus, rerun the "cd" command in the current shell. -# -#----------------------------------------------------------------------- -# - if [ "$cmd" = "cd" ]; then - "$cmd" "$@" 2>&1 > /dev/null - fi -# -#----------------------------------------------------------------------- -# -# If output is not empty, print out whatever message it contains (e.g. -# it might contain a warning or other informational message). -# -#----------------------------------------------------------------------- -# - if [ -n "$output" ]; then - - print_info_msg " -\"${cmd}_vrfy\" operation returned with a message. 
This command was -issued from ${script_or_function} in file: - - \"${caller_fp}\" - -Message from \"${cmd}_vrfy\" function's \"$cmd\" operation: -$output" - - fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} - - -# -#----------------------------------------------------------------------- -# -# The following are functions are counterparts of common filesystem -# commands "with verification", i.e. they execute a filesystem command -# (such as "cp" and "mv") and then verify that the execution was successful. -# -# These functions are called using the "filesys_cmd_vrfy" function defined -# above. In each of these functions, we: -# -# 1) Save current shell options (in a global array) and then set new -# options for this script/function. -# 2) Call the generic function "filesys_cmd_vrfy" with the command of -# interest (e.g. "cp") as the first argument and the arguments passed -# in as the rest. -# 3) Restore the shell options saved at the beginning of the function. -# -#----------------------------------------------------------------------- -# - -function cp_vrfy() { - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "cp" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - -function mv_vrfy() { - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "mv" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - -function rm_vrfy() { - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "rm" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - -function ln_vrfy() { - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "$LN_UTIL" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - -function mkdir_vrfy() { - { save_shell_opts; . ${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "mkdir" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - -function cd_vrfy() { - { save_shell_opts; . 
${USHdir}/preamble.sh; } > /dev/null 2>&1 - filesys_cmd_vrfy "cd" "$@" - { restore_shell_opts; } > /dev/null 2>&1 -} - diff --git a/ush/bash_utils/print_msg.sh b/ush/bash_utils/print_msg.sh index 28a70d1431..8b032f9698 100644 --- a/ush/bash_utils/print_msg.sh +++ b/ush/bash_utils/print_msg.sh @@ -68,7 +68,7 @@ function print_info_msg() { elif [ "$#" -eq 2 ]; then - verbose="$1" + verbose=$(boolify "$1") info_msg="$2" # #----------------------------------------------------------------------- diff --git a/ush/bash_utils/source_config.sh b/ush/bash_utils/source_config.sh deleted file mode 100644 index df5a79a0df..0000000000 --- a/ush/bash_utils/source_config.sh +++ /dev/null @@ -1,53 +0,0 @@ -# -#----------------------------------------------------------------------- -# This file defines function that sources a config file (yaml/json etc) -# into the calling shell script -#----------------------------------------------------------------------- -# - -function config_to_str() { - $USHdir/config_utils.py -o $1 -c $2 "${@:3}" -} - -# -#----------------------------------------------------------------------- -# Define functions for different file formats -#----------------------------------------------------------------------- -# -function config_to_shell_str() { - config_to_str shell "$@" -} -function config_to_ini_str() { - config_to_str ini "$@" -} -function config_to_yaml_str() { - config_to_str yaml "$@" -} -function config_to_json_str() { - config_to_str json "$@" -} -function config_to_xml_str() { - config_to_str xml "$@" -} - -# -#----------------------------------------------------------------------- -# Source contents of a config file to shell script -#----------------------------------------------------------------------- -# -function source_config() { - - source <( config_to_shell_str "$@" ) - -} -# -#----------------------------------------------------------------------- -# Source partial contents of a config file to shell script. 
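With the print_msg.sh change above, the two-argument form normalizes its verbosity flag before testing it. A short usage sketch, assuming (per the conventions stated elsewhere in this patch) that boolify maps YES/yes/TRUE/true and their negatives onto TRUE/FALSE:

    VERBOSE="yes"
    print_info_msg "${VERBOSE}" "printed: boolify turns 'yes' into TRUE"
    print_info_msg "NO" "suppressed: boolify turns 'NO' into FALSE"
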
-# Only those variables needed by the task are sourced -#----------------------------------------------------------------------- -# -function source_config_for_task() { - - source <( config_to_shell_str "${@:2}" -k "(^(?!task_)|$1).*" ) - -} diff --git a/ush/bash_utils/source_yaml.sh b/ush/bash_utils/source_yaml.sh new file mode 100644 index 0000000000..669408416e --- /dev/null +++ b/ush/bash_utils/source_yaml.sh @@ -0,0 +1,36 @@ + + +function source_yaml () { + + local func_name="${FUNCNAME[0]}" + + if [ "$#" -lt 1 ] ; then + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: ${func_name} + Number of args specified: $# + +Usage: + + ${func_name} yaml_file [section] + + yaml_file: path to the YAML file to source + section: optional subsection of yaml +" + fi + local section + yaml_file=$1 + section=$2 + + while read -r line ; do + + + # A regex to match list representations + line=$(echo "$line" | sed -E "s/='\[(.*)\]'/=(\1)/") + line=${line//,/} + line=${line//\"/} + line=${line/None/} + source <( echo "${line}" ) + done < <(uw config realize -i "${yaml_file}" --output-format sh --key-path $section) +} diff --git a/ush/config.aqm.nco.realtime.yaml b/ush/config.aqm.nco.realtime.yaml deleted file mode 100644 index f2299eacc9..0000000000 --- a/ush/config.aqm.nco.realtime.yaml +++ /dev/null @@ -1,99 +0,0 @@ -metadata: - description: config for Online-CMAQ, AQM_NA_13km, real-time, NCO mode on WCOSS2 -user: - RUN_ENVIR: nco - MACHINE: wcoss2 - ACCOUNT: [account name] -workflow: - USE_CRON_TO_RELAUNCH: true - CRON_RELAUNCH_INTVL_MNTS: 3 - EXPT_SUBDIR: aqm_nco_aqmna13km - PREDEF_GRID_NAME: AQM_NA_13km - CCPP_PHYS_SUITE: FV3_GFS_v16 - DATE_FIRST_CYCL: '2023051600' - DATE_LAST_CYCL: '2023051618' - INCR_CYCL_FREQ: 6 - FCST_LEN_HRS: -1 - FCST_LEN_CYCL: - - 6 - - 72 - - 72 - - 6 - PREEXISTING_DIR_METHOD: rename - VERBOSE: true - DEBUG: true - COMPILER: intel - DIAG_TABLE_TMPL_FN: diag_table_aqm.FV3_GFS_v16 - FIELD_TABLE_TMPL_FN: field_table_aqm.FV3_GFS_v16 - DO_REAL_TIME: true - COLDSTART: false - WARMSTART_CYCLE_DIR: /path/to/restart/dir -nco: - envir_default: prod - NET_default: aqm - model_ver_default: v7.0 - RUN_default: aqm - OPSROOT_default: /path/to/custom/opsroot - KEEPDATA_default: true -rocoto: - tasks: - taskgroups: '{{ ["parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml", "parm/wflow/aqm_post.yaml"]|include }}' - task_get_extrn_lbcs: - walltime: 02:00:00 - metatask_run_ensemble: - task_run_fcst_mem#mem#: - walltime: 04:00:00 -# task_aqm_ics_ext: - task_aqm_lbcs: - walltime: 01:00:00 -task_make_grid: - GRID_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/DOMAIN_DATA/AQM_NA_13km -task_make_orog: - OROG_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/DOMAIN_DATA/AQM_NA_13km -task_make_sfc_climo: - SFC_CLIMO_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/DOMAIN_DATA/AQM_NA_13km -task_get_extrn_ics: - EXTRN_MDL_NAME_ICS: FV3GFS - FV3GFS_FILE_FMT_ICS: netcdf - EXTRN_MDL_ICS_OFFSET_HRS: 6 -task_get_extrn_lbcs: - EXTRN_MDL_NAME_LBCS: FV3GFS - LBC_SPEC_INTVL_HRS: 6 - FV3GFS_FILE_FMT_LBCS: netcdf - EXTRN_MDL_LBCS_OFFSET_HRS: 6 -task_run_fcst: - DT_ATMOS: 180 - LAYOUT_X: 50 - LAYOUT_Y: 34 - BLOCKSIZE: 16 - RESTART_INTERVAL: 6 24 42 60 - QUILTING: true - PRINT_ESMF: false - DO_FCST_RESTART: false -task_run_post: - POST_OUTPUT_DOMAIN_NAME: 793 -global: - DO_ENSEMBLE: false - NUM_ENS_MEMBERS: 2 - HALO_BLEND: 0 -cpl_aqm_parm: - CPL_AQM: true - DO_AQM_CHEM_LBCS: true - DO_AQM_GEFS_LBCS: true - DO_AQM_DUST: true - DO_AQM_CANOPY: false - DO_AQM_PRODUCT: true - 
DO_AQM_SAVE_AIRNOW_HIST: false - DO_AQM_SAVE_FIRE: false - AQM_BIO_FILE: BEIS_RRFScmaq_C775.ncf - AQM_DUST_FILE_PREFIX: FENGSHA_p8_10km_inputs - AQM_DUST_FILE_SUFFIX: .nc - AQM_CANOPY_FILE_PREFIX: gfs.t12z.geo - AQM_CANOPY_FILE_SUFFIX: .canopy_regrid.nc - AQM_FIRE_FILE_PREFIX: Hourly_Emissions_regrid_NA_13km - AQM_FIRE_FILE_SUFFIX: _h72.nc - AQM_RC_FIRE_FREQUENCY: hourly - AQM_LBCS_FILES: am4_bndy.c793.2019.v1.nc - NEXUS_GRID_FN: grid_spec_793.nc - NUM_SPLIT_NEXUS: 6 - diff --git a/ush/config.aqm.community.yaml b/ush/config.aqm.yaml similarity index 69% rename from ush/config.aqm.community.yaml rename to ush/config.aqm.yaml index 30e391edf2..21a73591ee 100644 --- a/ush/config.aqm.community.yaml +++ b/ush/config.aqm.yaml @@ -1,38 +1,41 @@ metadata: - description: config for Online-CMAQ, AQM_NA_13km, community mode + description: config for SRW-AQM, AQM_NA_13km, warm-start user: RUN_ENVIR: community - MACHINE: [hera or wcoss2] + MACHINE: [hera/orion/hercules/derecho] ACCOUNT: [account name] workflow: USE_CRON_TO_RELAUNCH: true CRON_RELAUNCH_INTVL_MNTS: 3 - EXPT_SUBDIR: aqm_community_aqmna13 + EXPT_SUBDIR: aqm_AQMNA13km_warmstart PREDEF_GRID_NAME: AQM_NA_13km CCPP_PHYS_SUITE: FV3_GFS_v16 - DATE_FIRST_CYCL: '2023021700' - DATE_LAST_CYCL: '2023021706' - INCR_CYCL_FREQ: 6 - FCST_LEN_HRS: -1 - FCST_LEN_CYCL: - - 6 - - 12 + DATE_FIRST_CYCL: '2023111000' + DATE_LAST_CYCL: '2023111100' + INCR_CYCL_FREQ: 24 + FCST_LEN_HRS: 24 PREEXISTING_DIR_METHOD: rename VERBOSE: true - DEBUG: true + DEBUG: false COMPILER: intel DIAG_TABLE_TMPL_FN: diag_table_aqm.FV3_GFS_v16 FIELD_TABLE_TMPL_FN: field_table_aqm.FV3_GFS_v16 DO_REAL_TIME: false + COLDSTART: false # set to true for cold start + WARMSTART_CYCLE_DIR: '/scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/restart/2023111000' # for hera +# WARMSTART_CYCLE_DIR: '/work/noaa/epic/SRW-AQM_DATA/aqm_data/restart/2023111000' # for orion/hercules +# WARMSTART_CYCLE_DIR: '/glade/work/chanhooj/SRW-AQM_DATA/aqm_data/restart/2023111000' # for derecho nco: + envir_default: test_aqm_warmstart NET_default: aqm + RUN_default: aqm rocoto: tasks: taskgroups: '{{ ["parm/wflow/prep.yaml", "parm/wflow/aqm_prep.yaml", "parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' - task_aqm_ics_ext: +# task_aqm_ics_ext: # uncomment this in case of COLDSTART: true metatask_run_ensemble: task_run_fcst_mem#mem#: - walltime: 02:00:00 + walltime: 01:20:00 task_get_extrn_ics: EXTRN_MDL_NAME_ICS: FV3GFS FV3GFS_FILE_FMT_ICS: netcdf @@ -47,7 +50,7 @@ task_run_fcst: LAYOUT_X: 50 LAYOUT_Y: 34 BLOCKSIZE: 16 - RESTART_INTERVAL: 6 + RESTART_INTERVAL: 12 24 QUILTING: true PRINT_ESMF: false DO_FCST_RESTART: false diff --git a/ush/config.nco.yaml b/ush/config.nco.yaml deleted file mode 100644 index afcce0ba8a..0000000000 --- a/ush/config.nco.yaml +++ /dev/null @@ -1,41 +0,0 @@ -metadata: - description: >- - Sample nco config -user: - RUN_ENVIR: nco - MACHINE: hera - ACCOUNT: an_account -workflow: - USE_CRON_TO_RELAUNCH: false - EXPT_SUBDIR: test_nco - CCPP_PHYS_SUITE: FV3_GFS_v16 - PREDEF_GRID_NAME: RRFS_CONUS_25km - DATE_FIRST_CYCL: '2022040700' - DATE_LAST_CYCL: '2022040700' - FCST_LEN_HRS: 6 - PREEXISTING_DIR_METHOD: rename - VERBOSE: true - COMPILER: intel -nco: - model_ver_default: v1.0 - RUN_default: srw_test -task_get_extrn_ics: - EXTRN_MDL_NAME_ICS: FV3GFS - FV3GFS_FILE_FMT_ICS: grib2 -task_get_extrn_lbcs: - EXTRN_MDL_NAME_LBCS: FV3GFS - LBC_SPEC_INTVL_HRS: 3 - FV3GFS_FILE_FMT_LBCS: grib2 -task_run_fcst: - WRITE_DOPOST: true - QUILTING: true -task_plot_allvars: - COMOUT_REF: "" -task_run_post: 
- POST_OUTPUT_DOMAIN_NAME: conus_25km -rocoto: - tasks: - taskgroups: '{{ ["parm/wflow/coldstart.yaml", "parm/wflow/post.yaml"]|include }}' - metatask_run_ensemble: - task_run_fcst_mem#mem#: - walltime: 01:00:00 diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index b35b6108c7..90651c1b7f 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -245,8 +245,8 @@ platform: # RUN_CMD_SERIAL: # The run command for some serial jobs # - # RUN_CMD_AQM: - # The run command for some AQM tasks. + # RUN_CMD_NEXUS: + # The run command for the AQM NEXUS tasks. # # RUN_CMD_AQMLBC: # The run command for the AQM_LBCS task. @@ -258,9 +258,8 @@ platform: RUN_CMD_FCST: "" RUN_CMD_POST: "" RUN_CMD_PRDGEN: "" - RUN_CMD_AQM: "" + RUN_CMD_NEXUS: "" RUN_CMD_AQMLBC: "" - # #----------------------------------------------------------------------- # @@ -421,6 +420,12 @@ platform: # FIXshp: # System directory where the graphics shapefiles are located. # + # FIXaqm: + # System directory where AQM data files are located + # + # FIXemis: + # System directory where AQM emission data files are located. + # # FIXcrtm: # System directory where CRTM fixed files are located # @@ -435,6 +440,8 @@ platform: FIXorg: "" FIXsfc: "" FIXshp: "" + FIXaqm: "" + FIXemis: "" FIXcrtm: "" FIXcrtmupp: "" # @@ -461,7 +468,7 @@ workflow: # #----------------------------------------------------------------------- # - WORKFLOW_ID: !nowtimestamp '' + WORKFLOW_ID: "" # #----------------------------------------------------------------------- # @@ -610,9 +617,9 @@ workflow: # NUOPC/ESMF main component (ufs-weather-model: model_config). Its default # value is the name of the file that the ufs weather model expects to read in. # - # NEMS_CONFIG_FN: - # Name of a template file that contains information about the various NEMS - # components and their run sequence (ufs-weather-model: nems.configure). + # UFS_CONFIG_FN: + # Name of a template file that contains information about the various UFS + # components and their run sequence (ufs-weather-model: ufs.configure). # Its default value is the name of the file that the ufs weather model expects # to read in. # @@ -641,7 +648,7 @@ workflow: DIAG_TABLE_TMPL_FN: 'diag_table.{{ CCPP_PHYS_SUITE }}' FIELD_TABLE_TMPL_FN: 'field_table.{{ CCPP_PHYS_SUITE }}' MODEL_CONFIG_FN: "model_configure" - NEMS_CONFIG_FN: "nems.configure" + UFS_CONFIG_FN: "ufs.configure" AQM_RC_FN: "aqm.rc" AQM_RC_TMPL_FN: "aqm.rc" @@ -668,8 +675,8 @@ workflow: # MODEL_CONFIG_TMPL_FP: # Path to the MODEL_CONFIG_FN file. # - # NEMS_CONFIG_TMPL_FP: - # Path to the NEMS_CONFIG_FN file. + # UFS_CONFIG_TMPL_FP: + # Path to the UFS_CONFIG_FN file. # # AQM_RC_TMPL_FP: # Path to the AQM_RC_TMPL_FN file. @@ -684,7 +691,7 @@ workflow: DIAG_TABLE_TMPL_FP: '{{ [user.PARMdir, DIAG_TABLE_TMPL_FN]|path_join }}' FIELD_TABLE_TMPL_FP: '{{ [user.PARMdir, FIELD_TABLE_TMPL_FN]|path_join }}' MODEL_CONFIG_TMPL_FP: '{{ [user.PARMdir, MODEL_CONFIG_FN]|path_join }}' - NEMS_CONFIG_TMPL_FP: '{{ [user.PARMdir, NEMS_CONFIG_FN]|path_join }}' + UFS_CONFIG_TMPL_FP: '{{ [user.PARMdir, UFS_CONFIG_FN]|path_join }}' AQM_RC_TMPL_FP: '{{ [user.PARMdir, AQM_RC_TMPL_FN]|path_join }}' # @@ -697,8 +704,8 @@ workflow: # FIELD_TABLE_FP: # Path to the field table in the experiment directory. # - # NEMS_CONFIG_FP: - # Path to the NEMS_CONFIG_FN file in the experiment directory. + # UFS_CONFIG_FP: + # Path to the UFS_CONFIG_FN file in the experiment directory. # # FV3_NML_FP: # Path to the FV3_NML_FN file in the experiment directory. 
@@ -711,13 +718,11 @@ workflow: # script creates and that defines the workflow for the experiment. # # GLOBAL_VAR_DEFNS_FN: - # Name of file (a shell script) containing the definitions of the primary - # experiment variables (parameters) defined in this default configuration - # script and in the user-specified configuration as well as secondary - # experiment variables generated by the experiment generation script. - # This file is sourced by many scripts (e.g. the J-job scripts corresponding - # to each workflow task) in order to make all the experiment variables - # available in those scripts. + # Name of the experiment configuration file. It contains the primary + # experiment variables defined in this default configuration script and in the + # user-specified configuration as well as secondary experiment variables + # generated by the experiment generation script. This file is the primary + # source of information used in the scripts at run time. # # ROCOTO_YAML_FN: # Name of the YAML file containing the YAML workflow definition from @@ -759,13 +764,13 @@ workflow: # DATA_TABLE_FP: '{{ [EXPTDIR, DATA_TABLE_FN]|path_join }}' FIELD_TABLE_FP: '{{ [EXPTDIR, FIELD_TABLE_FN]|path_join }}' - NEMS_CONFIG_FP: '{{ [EXPTDIR, NEMS_CONFIG_FN]|path_join }}' + UFS_CONFIG_FP: '{{ [EXPTDIR, UFS_CONFIG_FN]|path_join }}' FV3_NML_FP: '{{ [EXPTDIR, FV3_NML_FN]|path_join }}' FV3_NML_STOCH_FP: '{{ [EXPTDIR, [FV3_NML_FN, "_stoch"]|join ]|path_join }}' FCST_MODEL: "ufs-weather-model" WFLOW_XML_FN: "FV3LAM_wflow.xml" - GLOBAL_VAR_DEFNS_FN: "var_defns.sh" + GLOBAL_VAR_DEFNS_FN: "var_defns.yaml" ROCOTO_YAML_FN: "rocoto_defns.yaml" EXTRN_MDL_VAR_DEFNS_FN: "extrn_mdl_var_defns" WFLOW_LAUNCH_SCRIPT_FN: "launch_FV3LAM_wflow.sh" @@ -846,7 +851,7 @@ workflow: CCPP_PHYS_SUITE_FN: 'suite_{{ CCPP_PHYS_SUITE }}.xml' CCPP_PHYS_SUITE_IN_CCPP_FP: '{{ [user.UFS_WTHR_MDL_DIR, "FV3", "ccpp", "suites", CCPP_PHYS_SUITE_FN] |path_join }}' CCPP_PHYS_SUITE_FP: '{{ [workflow.EXPTDIR, CCPP_PHYS_SUITE_FN]|path_join }}' - CCPP_PHYS_DIR: '{{ [user.UFS_WTHR_MDL_DIR, "FV3", "ccpp", "physics", "physics"] |path_join }}' + CCPP_PHYS_DIR: '{{ [user.UFS_WTHR_MDL_DIR, "FV3", "ccpp", "physics", "physics", "SFC_Models", "Land", "Noahmp"] |path_join }}' # #----------------------------------------------------------------------- # @@ -866,7 +871,7 @@ workflow: # #----------------------------------------------------------------------- # - FIELD_DICT_FN: "fd_nems.yaml" + FIELD_DICT_FN: "fd_ufs.yaml" FIELD_DICT_IN_UWM_FP: '{{ [user.UFS_WTHR_MDL_DIR, "tests", "parm", FIELD_DICT_FN]|path_join }}' FIELD_DICT_FP: '{{ [workflow.EXPTDIR, FIELD_DICT_FN]|path_join }}' # @@ -1116,31 +1121,8 @@ nco: # Name of model run (third level of com directory structure). # In general, same as ${NET_default}. # - # OPSROOT_default: - # The operations root directory in NCO mode. - # - # COMROOT_default: - # The com root directory for input/output data that is located on - # the current system. - # - # DATAROOT_default: - # Directory containing the (temporary) working directory for running - # jobs. - # - # DCOMROOT_default: - # dcom root directory, which contains input/incoming data that is - # retrieved from outside WCOSS. - # - # LOGBASEDIR_default: - # Directory in which the log files from the workflow tasks will be placed. 
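Because GLOBAL_VAR_DEFNS_FN is now YAML (var_defns.yaml) rather than a sourceable shell script, run-time scripts read only the sections they need. A sketch using the source_yaml helper introduced earlier in this patch (the EXPTDIR value is illustrative):

    GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
    for sect in platform workflow ; do
      source_yaml "${GLOBAL_VAR_DEFNS_FP}" "${sect}"
    done
    # Variables from those sections (e.g. VERBOSE) are now shell variables.
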
- # - # COMIN_BASEDIR: - # com directory for current model's input data, typically - # $COMROOT/$NET/$model_ver/$RUN.$PDY - # - # COMOUT_BASEDIR: - # com directory for current model's output data, typically - # $COMROOT/$NET/$model_ver/$RUN.$PDY + # PTMP: + # User-defined path to the com type directories (OPSROOT=$PTMP/$envir). # # DBNROOT_default: # Root directory for the data-alerting utilities. @@ -1174,26 +1156,20 @@ nco: # #----------------------------------------------------------------------- # - envir_default: "para" + envir_default: "test" NET_default: "srw" RUN_default: "srw" model_ver_default: "v1.0.0" - OPSROOT_default: '{{ workflow.EXPT_BASEDIR }}/../nco_dirs' - COMROOT_default: '{{ OPSROOT_default }}/com' - DATAROOT_default: '{{ OPSROOT_default }}/tmp' - DCOMROOT_default: '{{ OPSROOT_default }}/dcom' - LOGBASEDIR_default: '{% if user.RUN_ENVIR == "nco" %}{{ [OPSROOT_default, "output"]|path_join }}{% else %}{{ [workflow.EXPTDIR, "log"]|path_join }}{% endif %}' - COMIN_BASEDIR: '{{ COMROOT_default }}/{{ NET_default }}/{{ model_ver_default }}' - COMOUT_BASEDIR: '{{ COMROOT_default }}/{{ NET_default }}/{{ model_ver_default }}' + PTMP: '{{ workflow.EXPT_BASEDIR }}/../nco_dirs' DBNROOT_default: "" - SENDECF_default: false - SENDDBN_default: false - SENDDBN_NTC_default: false - SENDCOM_default: false - SENDWEB_default: false - KEEPDATA_default: true + SENDECF_default: "NO" + SENDDBN_default: "NO" + SENDDBN_NTC_default: "NO" + SENDCOM_default: "YES" + SENDWEB_default: "NO" + KEEPDATA_default: "YES" MAILTO_default: "" MAILCC_default: "" @@ -1757,8 +1733,8 @@ task_run_fcst: #----------------------------------------------------------------------- # KMP_AFFINITY_RUN_FCST: "scatter" - OMP_NUM_THREADS_RUN_FCST: 1 # ATM_omp_num_threads in nems.configure - OMP_STACKSIZE_RUN_FCST: "512m" + OMP_NUM_THREADS_RUN_FCST: 2 # atmos_nthreads in model_configure + OMP_STACKSIZE_RUN_FCST: "1024m" # #----------------------------------------------------------------------- # @@ -2173,6 +2149,10 @@ task_nexus_emission: # PPN_NEXUS_EMISSION: # Processes per node for the nexus_emission_* tasks. # + # NNODES_NEXUS_EMISSION: + # The number of nodes to request from the job scheduler + # for the nexus emission task. + # # KMP_AFFINITY_NEXUS_EMISSION: # Intel Thread Affinity Interface for the nexus_emission_* tasks. # @@ -2183,10 +2163,22 @@ task_nexus_emission: # Controls the size of the stack for threads created by the OpenMP implementation. #------------------------------------------------------------------------------- PPN_NEXUS_EMISSION: '{{ platform.NCORES_PER_NODE // OMP_NUM_THREADS_NEXUS_EMISSION }}' + NNODES_NEXUS_EMISSION: 4 KMP_AFFINITY_NEXUS_EMISSION: "scatter" OMP_NUM_THREADS_NEXUS_EMISSION: 2 OMP_STACKSIZE_NEXUS_EMISSION: "1024m" +#----------------------------- +# POINT_SOURCE config parameters +#----------------------------- +task_point_source: + #------------------------------------------------------------------------------- + # PT_SRC_SUBDIR: + # Sub-directory structure of point source data under FIXemis. 
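The PPN_NEXUS_EMISSION template above is integer division of the node's core count by the task's thread count; the same arithmetic in plain bash, assuming a hypothetical 40-core node and the defaults shown:

    NCORES_PER_NODE=40
    OMP_NUM_THREADS_NEXUS_EMISSION=2
    PPN_NEXUS_EMISSION=$(( NCORES_PER_NODE / OMP_NUM_THREADS_NEXUS_EMISSION ))
    echo "${PPN_NEXUS_EMISSION} ranks per node on ${NNODES_NEXUS_EMISSION:-4} nodes"  # -> 20 ranks
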
+ # Full path: FIXemis/PT_SRC_SUBDIR + #------------------------------------------------------------------------------- + PT_SRC_SUBDIR: "NEI2016v1/v2023-01-PT" + #---------------------------- # BIAS_CORRECTION_O3 config parameters #----------------------------- @@ -2574,41 +2566,15 @@ cpl_aqm_parm: # # DO_AQM_SAVE_FIRE: # Archive fire emission file to HPSS - # - # DCOMINbio_default: - # Path to the directory containing AQM bio files # - # DCOMINdust_default: - # Path to the directory containing AQM dust file + # COMINairnow_default: + # Path to the directory containing AIRNOW observation data # - # DCOMINcanopy_default: - # Path to the directory containing AQM canopy files - # - # DCOMINfire_default: + # COMINfire_default: # Path to the directory containing AQM fire files # - # DCOMINchem_lbcs_default: - # Path to the directory containing chemical LBC files - # - # DCOMINgefs_default: + # COMINgefs_default: # Path to the directory containing GEFS aerosol LBC files - # - # DCOMINpt_src_default: - # Parent directory containing point source files - # - # DCOMINairnow_default: - # Path to the directory containing AIRNOW observation data - # - # COMINbicor: - # Path of reading in historical training data for biascorrection - # - # COMOUTbicor: - # Path to save the current cycle's model output and AirNow obs as - # training data for future use. $COMINbicor and $COMOUTbicor can be - # distinguished by the ${yyyy}${mm}${dd} under the same location - # - # AQM_CONFIG_DIR: - # Configuration directory for AQM # # AQM_BIO_FILE: # File name of AQM BIO file @@ -2634,9 +2600,6 @@ cpl_aqm_parm: # AQM_FIRE_FILE_OFFSET_HRS: # Time offset when retrieving fire emission data files. # - # AQM_FIRE_ARCHV_DIR: - # Path to the archive directory for RAVE emission files on HPSS - # # AQM_RC_FIRE_FREQUENCY: # Fire frequency in aqm.rc # @@ -2655,13 +2618,6 @@ cpl_aqm_parm: # AQM_GEFS_FILE_CYC: # Cycle of the GEFS aerosol LBC files only if it is fixed # - # NEXUS_INPUT_DIR: - # Same as GRID_DIR but for the the air quality emission generation task. 
- # Should be blank for the default value specified in setup.sh
- #
- # NEXUS_FIX_DIR:
- # Directory containing grid_spec files as the input file of nexus
- #
# NEXUS_GRID_FN:
# File name of the input grid_spec file of nexus
#
@@ -2690,18 +2646,10 @@
DO_AQM_SAVE_AIRNOW_HIST: false
DO_AQM_SAVE_FIRE: false
- DCOMINbio_default: ""
- DCOMINdust_default: "/path/to/dust/dir"
- DCOMINcanopy_default: "/path/to/canopy/dir"
- DCOMINfire_default: ""
- DCOMINchem_lbcs_default: ""
- DCOMINgefs_default: ""
- DCOMINpt_src_default: "/path/to/point/source/base/directory"
- DCOMINairnow_default: "/path/to/airnow/obaservation/data"
- COMINbicor: "/path/to/historical/airnow/data/dir"
- COMOUTbicor: "/path/to/historical/airnow/data/dir"
+ COMINairnow_default: "/path/to/airnow/observation/data"
+ COMINfire_default: ""
+ COMINgefs_default: ""
- AQM_CONFIG_DIR: ""
AQM_BIO_FILE: "BEIS_SARC401.ncf"
@@ -2713,7 +2661,6 @@
AQM_FIRE_FILE_PREFIX: "GBBEPx_C401GRID.emissions_v003"
AQM_FIRE_FILE_SUFFIX: ".nc"
AQM_FIRE_FILE_OFFSET_HRS: 0
- AQM_FIRE_ARCHV_DIR: "/path/to/archive/dir/for/RAVE/on/HPSS"
AQM_RC_FIRE_FREQUENCY: "static"
AQM_RC_PRODUCT_FN: "aqm.prod.nc"
@@ -2724,8 +2671,6 @@
AQM_GEFS_FILE_PREFIX: "geaer"
AQM_GEFS_FILE_CYC: ""
- NEXUS_INPUT_DIR: ""
- NEXUS_FIX_DIR: ""
NEXUS_GRID_FN: "grid_spec_GSD_HRRR_25km.nc"
NUM_SPLIT_NEXUS: 3
NEXUS_GFS_SFC_OFFSET_HRS: 0
diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py
index 5608e4cbf2..c37ed05d29 100644
--- a/ush/create_aqm_rc_file.py
+++ b/ush/create_aqm_rc_file.py
@@ -6,15 +6,14 @@
import argparse
import os
import sys
-import tempfile
-from subprocess import STDOUT, CalledProcessError, check_output
from textwrap import dedent
+from uwtools.api.template import render
from python_utils import (
cfg_to_yaml_str,
flatten_dict,
import_vars,
- load_shell_config,
+ load_yaml_config,
print_info_msg,
print_input_args,
str_to_type,
)
@@ -61,25 +60,23 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations):
#
# Set parameters in the aqm.rc file.
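The create_aqm_rc_file.py hunk continuing below replaces the per-dataset DCOMIN* roots with subdirectories of the single FIXaqm directory added to the platform section. Expressed as shell paths (the FIXaqm value is hypothetical), the new layout is:

    FIXaqm="/path/to/fix/aqm"
    bio_file="${FIXaqm}/bio/${AQM_BIO_FILE}"
    dust_file="${FIXaqm}/dust/${AQM_DUST_FILE_PREFIX}_${PREDEF_GRID_NAME}${AQM_DUST_FILE_SUFFIX}"
    canopy_file="${FIXaqm}/canopy/${PREDEF_GRID_NAME}/${AQM_CANOPY_FILE_PREFIX}.${mm}${AQM_CANOPY_FILE_SUFFIX}"
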
# - aqm_rc_bio_file_fp=os.path.join(DCOMINbio, AQM_BIO_FILE) + aqm_rc_bio_file_fp=os.path.join(FIXaqm,"bio", AQM_BIO_FILE) # Fire config aqm_rc_fire_file_fp=os.path.join( COMIN, - "FIRE_EMISSION", f"{AQM_FIRE_FILE_PREFIX}_{yyyymmdd}_t{hh}z{AQM_FIRE_FILE_SUFFIX}" ) # Dust config aqm_rc_dust_file_fp=os.path.join( - DCOMINdust, + FIXaqm,"dust", f"{AQM_DUST_FILE_PREFIX}_{PREDEF_GRID_NAME}{AQM_DUST_FILE_SUFFIX}", ) # Canopy config aqm_rc_canopy_file_fp=os.path.join( - DCOMINcanopy, - PREDEF_GRID_NAME, + FIXaqm,"canopy",PREDEF_GRID_NAME, f"{AQM_CANOPY_FILE_PREFIX}.{mm}{AQM_CANOPY_FILE_SUFFIX}", ) # @@ -96,10 +93,9 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): "do_aqm_canopy": DO_AQM_CANOPY, "do_aqm_product": DO_AQM_PRODUCT, "ccpp_phys_suite": CCPP_PHYS_SUITE, - "aqm_config_dir": AQM_CONFIG_DIR, "init_concentrations": init_concentrations, "aqm_rc_bio_file_fp": aqm_rc_bio_file_fp, - "dcominbio": DCOMINbio, + "fixaqm": FIXaqm, "aqm_rc_fire_file_fp": aqm_rc_fire_file_fp, "aqm_rc_fire_frequency": AQM_RC_FIRE_FREQUENCY, "aqm_rc_dust_file_fp": aqm_rc_dust_file_fp, @@ -127,36 +123,11 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): # #----------------------------------------------------------------------- # - with tempfile.NamedTemporaryFile( - dir="./", - mode="w+t", - prefix="aqm_rc_settings", - suffix=".yaml") as tmpfile: - tmpfile.write(settings_str) - tmpfile.seek(0) - cmd = " ".join(["uw template render", - "-i", - AQM_RC_TMPL_FP, - "-o", - aqm_rc_fp, - "-v", - "--values-file", - tmpfile.name, - ] - ) - indent = " " - output = "" - try: - output = check_output(cmd, encoding="utf=8", shell=True, - stderr=STDOUT, text=True) - except CalledProcessError as e: - output = e.output - print(f"Failed with status: {e.returncode}") - sys.exit(1) - finally: - print("Output:") - for line in output.split("\n"): - print(f"{indent * 2}{line}") + render( + input_file = AQM_RC_TMPL_FP, + output_file = aqm_rc_fp, + values_src = settings, + ) return True def parse_args(argv): @@ -187,7 +158,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_aqm_rc_file( diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py index 40f5e0deee..113953172d 100644 --- a/ush/create_diag_table_file.py +++ b/ush/create_diag_table_file.py @@ -7,15 +7,14 @@ import argparse import os import sys -import tempfile -from subprocess import STDOUT, CalledProcessError, check_output from textwrap import dedent +from uwtools.api.template import render from python_utils import ( cfg_to_yaml_str, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, print_info_msg, print_input_args, ) @@ -74,32 +73,11 @@ def create_diag_table_file(run_dir): verbose=VERBOSE, ) - with tempfile.NamedTemporaryFile(dir="./", - mode="w+t", - prefix="aqm_rc_settings", - suffix=".yaml") as tmpfile: - tmpfile.write(settings_str) - tmpfile.seek(0) - cmd = " ".join(["uw template render", - "-i", DIAG_TABLE_TMPL_FP, - "-o", diag_table_fp, - "-v", - "--values-file", tmpfile.name, - ] + render( + input_file = DIAG_TABLE_TMPL_FP, + output_file = diag_table_fp, + values_src = settings, ) - indent = " " - output = "" - try: - output = check_output(cmd, encoding="utf=8", shell=True, - stderr=STDOUT, text=True) - except CalledProcessError as e: - output = e.output - print(f"Failed with status: {e.returncode}") - sys.exit(1) - finally: - 
print("Output:") - for line in output.split("\n"): - print(f"{indent * 2}{line}") return True @@ -124,7 +102,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_diag_table_file(args.run_dir) diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index c2778f1be5..b8767f635a 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -6,15 +6,14 @@ import argparse import os import sys -import tempfile from textwrap import dedent -from subprocess import STDOUT, CalledProcessError, check_output +from uwtools.api.template import render from python_utils import ( cfg_to_yaml_str, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, lowercase, print_info_msg, print_input_args, @@ -71,6 +70,7 @@ def create_model_configure_file( # ----------------------------------------------------------------------- # settings = { + "PE_MEMBER01": PE_MEMBER01, "start_year": cdate.year, "start_month": cdate.month, "start_day": cdate.day, @@ -78,6 +78,7 @@ def create_model_configure_file( "nhours_fcst": fcst_len_hrs, "fhrot": fhrot, "dt_atmos": DT_ATMOS, + "atmos_nthreads": OMP_NUM_THREADS_RUN_FCST, "restart_interval": RESTART_INTERVAL, "itasks": ITASKS, "write_dopost": f".{lowercase(str(WRITE_DOPOST))}.", @@ -218,32 +219,11 @@ def create_model_configure_file( # model_config_fp = os.path.join(run_dir, MODEL_CONFIG_FN) - with tempfile.NamedTemporaryFile(dir="./", - mode="w+t", - suffix=".yaml", - prefix="model_config_settings.") as tmpfile: - tmpfile.write(settings_str) - tmpfile.seek(0) - cmd = " ".join(["uw template render", - "-i", MODEL_CONFIG_TMPL_FP, - "-o", model_config_fp, - "-v", - "--values-file", tmpfile.name, - ] + render( + input_file = MODEL_CONFIG_TMPL_FP, + output_file = model_config_fp, + values_src = settings ) - indent = " " - output = "" - try: - output = check_output(cmd, encoding="utf=8", shell=True, - stderr=STDOUT, text=True) - except CalledProcessError as e: - output = e.output - print(f"Failed with status: {e.returncode}") - sys.exit(1) - finally: - print("Output:") - for line in output.split("\n"): - print(f"{indent * 2}{line}") return True @@ -316,7 +296,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_model_configure_file( diff --git a/ush/create_nems_configure_file.py b/ush/create_ufs_configure_file.py similarity index 56% rename from ush/create_nems_configure_file.py rename to ush/create_ufs_configure_file.py index a6ba1cbd6b..3fd82f488b 100644 --- a/ush/create_nems_configure_file.py +++ b/ush/create_ufs_configure_file.py @@ -1,28 +1,27 @@ #!/usr/bin/env python3 """ -Function to create a NEMS configuration file for the FV3 forecast +Function to create a UFS configuration file for the FV3 forecast model(s) from a template. 
""" import argparse import os import sys -import tempfile -from subprocess import STDOUT, CalledProcessError, check_output from textwrap import dedent +from uwtools.api.template import render from python_utils import ( cfg_to_yaml_str, flatten_dict, import_vars, - load_shell_config, + load_yaml_config, print_info_msg, print_input_args, ) -def create_nems_configure_file(run_dir): - """ Creates a nems configuration file in the specified +def create_ufs_configure_file(run_dir): + """ Creates a ufs configuration file in the specified run directory Args: @@ -41,19 +40,18 @@ def create_nems_configure_file(run_dir): # #----------------------------------------------------------------------- # - # Create a NEMS configuration file in the specified run directory. + # Create a UFS configuration file in the specified run directory. # #----------------------------------------------------------------------- # print_info_msg(f''' - Creating a nems.configure file (\"{NEMS_CONFIG_FN}\") in the specified + Creating a ufs.configure file (\"{UFS_CONFIG_FN}\") in the specified run directory (run_dir): run_dir = \"{run_dir}\"''', verbose=VERBOSE) # # Set output file path # - nems_config_fp = os.path.join(run_dir, NEMS_CONFIG_FN) - pe_member01_m1 = str(int(PE_MEMBER01)-1) + ufs_config_fp = os.path.join(run_dir, UFS_CONFIG_FN) # #----------------------------------------------------------------------- # @@ -66,16 +64,14 @@ def create_nems_configure_file(run_dir): settings = { "dt_atmos": DT_ATMOS, "print_esmf": PRINT_ESMF, - "cpl_aqm": CPL_AQM, - "pe_member01_m1": pe_member01_m1, - "atm_omp_num_threads": OMP_NUM_THREADS_RUN_FCST, + "cpl_aqm": CPL_AQM } settings_str = cfg_to_yaml_str(settings) print_info_msg( dedent( f""" - The variable \"settings\" specifying values to be used in the \"{NEMS_CONFIG_FN}\" + The variable \"settings\" specifying values to be used in the \"{UFS_CONFIG_FN}\" file has been set as follows:\n settings =\n\n""" ) @@ -85,46 +81,22 @@ def create_nems_configure_file(run_dir): # #----------------------------------------------------------------------- # - # Call a python script to generate the experiment's actual NEMS_CONFIG_FN + # Call a python script to generate the experiment's actual UFS_CONFIG_FN # file from the template file. # #----------------------------------------------------------------------- # - # Store the settings in a temporary file - with tempfile.NamedTemporaryFile(dir="./", - mode="w+t", - prefix="nems_config_settings", - suffix=".yaml") as tmpfile: - tmpfile.write(settings_str) - tmpfile.seek(0) - - cmd = " ".join(["uw template render", - "-i", NEMS_CONFIG_TMPL_FP, - "-o", nems_config_fp, - "-v", - "--values-file", tmpfile.name, - ] + render( + input_file = UFS_CONFIG_TMPL_FP, + output_file = ufs_config_fp, + values_src = settings, ) - - indent = " " - output = "" - try: - output = check_output(cmd, encoding="utf=8", shell=True, - stderr=STDOUT, text=True) - except CalledProcessError as e: - output = e.output - print(f"Failed with status: {e.returncode}") - sys.exit(1) - finally: - print("Output:") - for line in output.split("\n"): - print(f"{indent * 2}{line}") return True def parse_args(argv): """ Parse command line arguments""" parser = argparse.ArgumentParser( - description='Creates NEMS configuration file.' + description='Creates UFS configuration file.' 
) parser.add_argument("-r", "--run-dir", @@ -141,9 +113,9 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) - create_nems_configure_file( + create_ufs_configure_file( run_dir=args.run_dir, ) diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index a678a61132..c671a69da8 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -11,10 +11,15 @@ import logging import os import sys -from subprocess import STDOUT, CalledProcessError, check_output +from stat import S_IXUSR +from string import Template from textwrap import dedent +from uwtools.api.config import get_nml_config, get_yaml_config, realize +from uwtools.api.template import render + from python_utils import ( + list_to_str, log_info, import_vars, export_vars, @@ -22,7 +27,6 @@ ln_vrfy, mkdir_vrfy, mv_vrfy, - create_symlink_to_file, check_for_preexist_dir_file, cfg_to_yaml_str, find_pattern_in_str, @@ -30,9 +34,8 @@ ) from setup import setup -from set_FV3nml_sfc_climo_filenames import set_FV3nml_sfc_climo_filenames +from set_fv3nml_sfc_climo_filenames import set_fv3nml_sfc_climo_filenames from get_crontab_contents import add_crontab_line -from set_namelist import set_namelist from check_python_version import check_python_version # pylint: disable=too-many-locals,too-many-branches, too-many-statements @@ -111,29 +114,11 @@ def generate_FV3LAM_wflow( # Call the python script to generate the experiment's XML file # rocoto_yaml_fp = expt_config["workflow"]["ROCOTO_YAML_FP"] - cmd = " ".join(["uw template render", - "-i", template_xml_fp, - "-o", wflow_xml_fp, - "-v", - "--values-file", rocoto_yaml_fp, - ] - ) - - indent = " " - output = "" - logfunc = logging.info - try: - output = check_output(cmd, encoding="utf=8", shell=True, - stderr=STDOUT, text=True) - except CalledProcessError as e: - logfunc = logging.error - output = e.output - logging.exception(("Failed with status: %s", e.returncode)) - raise - finally: - logfunc("Output:") - for line in output.split("\n"): - logfunc("%s%s", indent * 2, line) + render( + input_file = template_xml_fp, + output_file = wflow_xml_fp, + values_src = rocoto_yaml_fp, + ) # # ----------------------------------------------------------------------- # @@ -154,9 +139,23 @@ def generate_FV3LAM_wflow( verbose=debug, ) - create_symlink_to_file( - wflow_launch_script_fp, os.path.join(exptdir, wflow_launch_script_fn), False - ) + with open(wflow_launch_script_fp, "r", encoding='utf-8') as launch_script_file: + launch_script_content = launch_script_file.read() + + # Stage an experiment-specific launch file in the experiment directory + template = Template(launch_script_content) + + # The script needs several variables from the workflow and user sections + template_variables = {**expt_config["user"], **expt_config["workflow"], + "valid_vals_BOOLEAN": list_to_str(expt_config["constants"]["valid_vals_BOOLEAN"])} + launch_content = template.safe_substitute(template_variables) + + launch_fp = os.path.join(exptdir, wflow_launch_script_fn) + with open(launch_fp, "w", encoding='utf-8') as expt_launch_fn: + expt_launch_fn.write(launch_content) + + os.chmod(launch_fp, os.stat(launch_fp).st_mode|S_IXUSR) + # # ----------------------------------------------------------------------- # @@ -506,24 +505,23 @@ def generate_FV3LAM_wflow( # # ----------------------------------------------------------------------- # 
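The render() calls adopted throughout these Python scripts are the library form of the `uw template render` command the deleted code shelled out to; for reference, the equivalent CLI invocation, with illustrative experiment paths, is:

    uw template render \
      -i parm/FV3LAM_wflow.xml \
      -o "${EXPTDIR}/FV3LAM_wflow.xml" \
      -v \
      --values-file "${EXPTDIR}/rocoto_defns.yaml"
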
- # Call the set_namelist.py script to create a new FV3 namelist file (full - # path specified by FV3_NML_FP) using the file FV3_NML_BASE_SUITE_FP as - # the base (i.e. starting) namelist file, with physics-suite-dependent - # modifications to the base file specified in the yaml configuration file - # FV3_NML_YAML_CONFIG_FP (for the physics suite specified by CCPP_PHYS_SUITE), - # and with additional physics-suite-independent modifications specified - # in the variable "settings" set above. + # Create a new FV3 namelist file # # ----------------------------------------------------------------------- # - args=[ "-n", FV3_NML_BASE_SUITE_FP, - "-c", FV3_NML_YAML_CONFIG_FP, CCPP_PHYS_SUITE, - "-u", settings_str, - "-o", FV3_NML_FP, - ] - if not debug: - args.append("-q") - set_namelist(args) + + physics_cfg = get_yaml_config(FV3_NML_YAML_CONFIG_FP) + base_namelist = get_nml_config(FV3_NML_BASE_SUITE_FP) + base_namelist.update_values(physics_cfg[CCPP_PHYS_SUITE]) + base_namelist.update_values(settings) + for sect, values in base_namelist.copy().items(): + if not values: + del base_namelist[sect] + continue + for k, v in values.copy().items(): + if v is None: + del base_namelist[sect][k] + base_namelist.dump(FV3_NML_FP) # # If not running the TN_MAKE_GRID task (which implies the workflow will # use pregenerated grid files), set the namelist variables specifying @@ -538,7 +536,7 @@ def generate_FV3LAM_wflow( # if not expt_config['rocoto']['tasks'].get('task_make_grid'): - set_FV3nml_sfc_climo_filenames(debug) + set_fv3nml_sfc_climo_filenames(flatten_dict(expt_config), debug) # # ----------------------------------------------------------------------- @@ -652,14 +650,13 @@ def generate_FV3LAM_wflow( #----------------------------------------------------------------------- # if any((DO_SPP, DO_SPPT, DO_SHUM, DO_SKEB, DO_LSM_SPP)): - - args=[ "-n", FV3_NML_FP, - "-u", settings_str, - "-o", FV3_NML_STOCH_FP, - ] - if not debug: - args.append("-q") - set_namelist(args) + realize( + input_config=FV3_NML_FP, + input_format="nml", + output_file=FV3_NML_STOCH_FP, + output_format="nml", + update_config=get_nml_config(settings), + ) # # ----------------------------------------------------------------------- diff --git a/ush/get_mrms_files.sh b/ush/get_mrms_files.sh index b669094488..65a99cc1bd 100644 --- a/ush/get_mrms_files.sh +++ b/ush/get_mrms_files.sh @@ -54,7 +54,7 @@ function get_mrms_files () { # 10 represents a significant number of vertical levels of data if [ ${numgrib2} -ge 10 ] && [ ! 
-e filelist_mrms ]; then - cp_vrfy ${nsslfile1} ${output_path} + cp ${nsslfile1} ${output_path} ls ${output_path}/${file_matches} > ${output_path}/filelist_mrms echo "Copying mrms files for ${YYYY}${MM}${DD}-${cyc}${min}" fi diff --git a/ush/job_preamble.sh b/ush/job_preamble.sh index e243f31b37..ecfb94fb50 100644 --- a/ush/job_preamble.sh +++ b/ush/job_preamble.sh @@ -1,5 +1,7 @@ #!/bin/bash +set +u + # #----------------------------------------------------------------------- # @@ -25,10 +27,10 @@ export envir="${envir:-${envir_default}}" export NET="${NET:-${NET_default}}" export RUN="${RUN:-${RUN_default}}" export model_ver="${model_ver:-${model_ver_default}}" -export COMROOT="${COMROOT:-${COMROOT_default}}" -export DATAROOT="${DATAROOT:-${DATAROOT_default}}" -export DCOMROOT="${DCOMROOT:-${DCOMROOT_default}}" -export LOGBASEDIR="${LOGBASEDIR:-${LOGBASEDIR_default}}" +export COMROOT="${COMROOT:-${PTMP}/${envir}/com}" +export DATAROOT="${DATAROOT:-${PTMP}/${envir}/tmp}" +export DCOMROOT="${DCOMROOT:-${PTMP}/${envir}/dcom}" +export DATA_SHARE="${DATA_SHARE:-${DATAROOT}/DATA_SHARE/${PDY}${cyc}}" export DBNROOT="${DBNROOT:-${DBNROOT_default}}" export SENDECF="${SENDECF:-${SENDECF_default}}" @@ -41,49 +43,25 @@ export MAILTO="${MAILTO:-${MAILTO_default}}" export MAILCC="${MAILCC:-${MAILCC_default}}" if [ "${RUN_ENVIR}" = "nco" ]; then + [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT if [ "${MACHINE}" = "WCOSS2" ]; then - [[ "$WORKFLOW_MANAGER" = "rocoto" ]] && export COMROOT=$COMROOT export COMIN="${COMIN:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" export COMOUT="${COMOUT:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDY}/${cyc})}" export COMINm1="${COMINm1:-$(compath.py -o ${NET}/${model_ver}/${RUN}.${PDYm1})}" export COMINgfs="${COMINgfs:-$(compath.py ${envir}/gfs/${gfs_ver})}" export COMINgefs="${COMINgefs:-$(compath.py ${envir}/gefs/${gefs_ver})}" else - export COMIN="${COMIN_BASEDIR}/${RUN}.${PDY}/${cyc}" - export COMOUT="${COMOUT_BASEDIR}/${RUN}.${PDY}/${cyc}" - export COMINm1="${COMIN_BASEDIR}/${RUN}.${PDYm1}" + export COMIN="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" + export COMOUT="${COMOUT:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}}" + export COMINm1="${COMIN:-${COMROOT}/${NET}/${model_ver}/${RUN}.${PDYm1}/${cyc}}" fi else - export COMIN="${COMIN_BASEDIR}/${PDY}${cyc}" - export COMOUT="${COMOUT_BASEDIR}/${PDY}${cyc}" - export COMINm1="${COMIN_BASEDIR}/${RUN}.${PDYm1}" + export COMIN="${EXPTDIR}/${PDY}${cyc}" + export COMOUT="${EXPTDIR}/${PDY}${cyc}" + export COMINm1="${EXPTDIR}/${PDYm1}${cyc}" fi export COMOUTwmo="${COMOUTwmo:-${COMOUT}/wmo}" -export DCOMINbio="${DCOMINbio:-${DCOMINbio_default}}" -export DCOMINdust="${DCOMINdust:-${DCOMINdust_default}}" -export DCOMINcanopy="${DCOMINcanopy:-${DCOMINcanopy_default}}" -export DCOMINfire="${DCOMINfire:-${DCOMINfire_default}}" -export DCOMINchem_lbcs="${DCOMINchem_lbcs:-${DCOMINchem_lbcs_default}}" -export DCOMINgefs="${DCOMINgefs:-${DCOMINgefs_default}}" -export DCOMINpt_src="${DCOMINpt_src:-${DCOMINpt_src_default}}" -export DCOMINairnow="${DCOMINairnow:-${DCOMINairnow_default}}" - -# -#----------------------------------------------------------------------- -# -# Change YES/NO (NCO standards; job card) to TRUE/FALSE (workflow standards) -# for NCO environment variables -# -#----------------------------------------------------------------------- -# -export KEEPDATA=$(boolify "${KEEPDATA}") -export SENDCOM=$(boolify "${SENDCOM}") -export SENDDBN=$(boolify "${SENDDBN}") 
-export SENDDBN_NTC=$(boolify "${SENDDBN_NTC}")
-export SENDECF=$(boolify "${SENDECF}")
-export SENDWEB=$(boolify "${SENDWEB}")
-
#
#-----------------------------------------------------------------------
#
@@ -91,12 +69,13 @@ export SENDWEB=$(boolify "${SENDWEB}")
#
#-----------------------------------------------------------------------
#
-if [ $subcyc -eq 0 ]; then
- export cycle="t${cyc}z"
+if [ ${subcyc:-0} -ne 0 ]; then
+ export cycle="t${cyc}${subcyc}z"
else
- export cycle="t${cyc}${subcyc}z"
+ export cycle="t${cyc}z"
fi
-if [ "${RUN_ENVIR}" = "nco" ] && [ "${DO_ENSEMBLE}" = "TRUE" ] && [ ! -z $ENSMEM_INDX ]; then
+
+if [ "${RUN_ENVIR}" = "nco" ] && [ $(boolify "${DO_ENSEMBLE}") = "TRUE" ] && [ ! -z $ENSMEM_INDX ]; then
export dot_ensmem=".mem${ENSMEM_INDX}"
else
export dot_ensmem=
@@ -111,7 +90,7 @@ fi
export DATA=
if [ "${RUN_ENVIR}" = "nco" ]; then
export DATA=${DATAROOT}/${jobid}
- mkdir_vrfy -p $DATA
+ mkdir -p $DATA
cd $DATA
fi
#
@@ -197,10 +176,10 @@ export -f POST_STEP
#
if [ "${RUN_ENVIR}" = "nco" ] && [ "${WORKFLOW_MANAGER}" != "ecflow" ]; then
__EXPTLOG=${EXPTDIR}/log
- mkdir_vrfy -p ${__EXPTLOG}
+ mkdir -p ${__EXPTLOG}
for i in ${LOGDIR}/*.${WORKFLOW_ID}.log; do
__LOGB=$(basename $i .${WORKFLOW_ID}.log)
- ln_vrfy -sf $i ${__EXPTLOG}/${__LOGB}.log
+ ln -sf $i ${__EXPTLOG}/${__LOGB}.log
done
fi
#
@@ -238,4 +217,3 @@ In directory: \"${scrfunc_dir}\"
========================================================================"
}
-
diff --git a/ush/launch_FV3LAM_wflow.sh b/ush/launch_FV3LAM_wflow.sh
old mode 100755
new mode 100644
index cfbedac9cf..7c26511f4f
--- a/ush/launch_FV3LAM_wflow.sh
+++ b/ush/launch_FV3LAM_wflow.sh
@@ -34,43 +34,10 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" )
#
#-----------------------------------------------------------------------
#
-# Get the experiment directory. We assume that there is a symlink to
-# this script in the experiment directory, and this script is called via
-# that symlink. Thus, finding the directory in which the symlink is
-# located will give us the experiment directory. We find this by first
-# obtaining the directory portion (i.e. the portion without the name of
-# this script) of the command that was used to called this script (i.e.
-# "$0") and then use the "readlink -f" command to obtain the corresponding
-# absolute path. This will work for all four of the following ways in
-# which the symlink in the experiment directory pointing to this script
-# may be called:
-#
-# 1) Call this script from the experiment directory:
-# > cd /path/to/experiment/directory
-# > launch_FV3LAM_wflow.sh
-#
-# 2) Call this script from the experiment directory but using "./" before
-# the script name:
-# > cd /path/to/experiment/directory
-# > ./launch_FV3LAM_wflow.sh
-#
-# 3) Call this script from any directory using the absolute path to the
-# symlink in the experiment directory:
-# > /path/to/experiment/directory/launch_FV3LAM_wflow.sh
-#
-# 4) Call this script from a directory that is several levels up from the
-# experiment directory (but not necessarily at the root directory):
-# > cd /path/to
-# > experiment/directory/launch_FV3LAM_wflow.sh
-#
-# Note that given just a file name, e.g. the name of this script without
-# any path before it, the "dirname" command will return a ".", e.g. in
-# bash,
-#
-# > exptdir=$( dirname "launch_FV3LAM_wflow.sh" )
-# > echo $exptdir
-#
-# will print out ".".
+# This script will be configured for a specific experiment when
+# generate_FV3LAM_wflow.py runs.
That process fills in what is necessary so +# this configured script in the experiment directory will need no +# additional information at run time. # #----------------------------------------------------------------------- # @@ -94,7 +61,12 @@ fi # #----------------------------------------------------------------------- # -. $exptdir/var_defns.sh + +# These variables are assumed to exist in the global environment by the +# bash_utils, which is a Very Bad (TM) thing. +export USHdir=$USHdir +export valid_vals_BOOLEAN=${valid_vals_BOOLEAN} + . $USHdir/source_util_funcs.sh # #----------------------------------------------------------------------- @@ -166,7 +138,7 @@ wflow_status="IN PROGRESS" # #----------------------------------------------------------------------- # -cd_vrfy "$exptdir" +cd "$exptdir" # #----------------------------------------------------------------------- # @@ -369,7 +341,7 @@ by expt_name has completed with the following workflow status (wflow_status): # Thus, there is no need to try to relaunch it. We also append a message # to the completion message above to indicate this. # - if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then + if [ $(boolify "${USE_CRON_TO_RELAUNCH}") = "TRUE" ]; then msg="${msg}\ Thus, there is no need to relaunch the workflow via a cron job. Removing diff --git a/ush/link_fix.py b/ush/link_fix.py index fdd9a65f28..f0d103d8ea 100755 --- a/ush/link_fix.py +++ b/ush/link_fix.py @@ -18,7 +18,7 @@ cd_vrfy, mkdir_vrfy, find_pattern_in_str, - load_shell_config, + load_yaml_config, ) @@ -403,7 +403,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) link_fix( verbose=cfg["workflow"]["VERBOSE"], file_group=args.file_group, diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index 89f3addf41..5ede278bfd 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -3,33 +3,43 @@ # #----------------------------------------------------------------------- # -# Source necessary files. +# This script loads the appropriate modules for a given task in an +# experiment. # -#----------------------------------------------------------------------- +# It requires the following global environment variables: # -. ${GLOBAL_VAR_DEFNS_FP} -. $USHdir/source_util_funcs.sh +# GLOBAL_VAR_DEFNS_FP # -#----------------------------------------------------------------------- +# And uses these variables from the GLOBAL_VAR_DEFNS_FP file # -# Save current shell options (in a global array). Then set new options -# for this script/function. +# platform: +# BUILD_MOD_FN +# RUN_VER_FN +# +# workflow: +# VERBOSE # #----------------------------------------------------------------------- # -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 + +# Get the location of this file -- it's the USHdir +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +USHdir=$( dirname "${scrfunc_fp}" ) +HOMEdir=$( dirname $USHdir ) + +source $USHdir/source_util_funcs.sh + # #----------------------------------------------------------------------- # -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). +# Save current shell options (in a global array). Then set new options +# for this script/function. 
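A sketch of what that staging produces: string.Template's safe_substitute() fills the $-placeholders in the launch-script template, so the copy written to the experiment directory opens with concrete values. The path and the exact boolean-list serialization (produced by list_to_str) shown here are illustrative:

    # Template:  export USHdir=$USHdir
    # Staged copy (example values):
    export USHdir="/home/user/ufs-srweather-app/ush"
    export valid_vals_BOOLEAN="TRUE true YES yes FALSE false NO no"
    . $USHdir/source_util_funcs.sh
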
# #----------------------------------------------------------------------- # -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) +{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 + # #----------------------------------------------------------------------- # @@ -37,7 +47,7 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -if [ "$#" -ne 2 ]; then +if [ "$#" -ne 3 ]; then print_err_msg_exit " Incorrect number of arguments specified: @@ -46,15 +56,17 @@ Incorrect number of arguments specified: Usage: - ${scrfunc_fn} task_name jjob_fp + ${scrfunc_fn} machine task_name jjob_fp where the arguments are defined as follows: + machine: The name of the supported platform + task_name: The name of the rocoto task for which this script will load modules and launch the J-job. - jjob_fp + jjob_fp: The full path to the J-job script corresponding to task_name. This script will launch this J-job using the \"exec\" command (which will first terminate this script and then launch the j-job; see man page of @@ -65,12 +77,13 @@ fi # #----------------------------------------------------------------------- # -# Get the task name and the name of the J-job script. +# Save arguments # #----------------------------------------------------------------------- # -task_name="$1" -jjob_fp="$2" +machine=$(echo_lowercase $1) +task_name="$2" +jjob_fp="$3" # #----------------------------------------------------------------------- # @@ -99,12 +112,38 @@ set -u #----------------------------------------------------------------------- # default_modules_dir="$HOMEdir/modulefiles" -machine=$(echo_lowercase $MACHINE) -if [ "${WORKFLOW_MANAGER}" != "ecflow" ]; then +test ! $(module is-loaded ecflow > /dev/null 2>&1) && ecflow_loaded=false + +if [ "$ecflow_loaded" = "false" ] ; then source "${HOMEdir}/etc/lmod-setup.sh" ${machine} fi module use "${default_modules_dir}" +# Load workflow environment + +if [ -f ${default_modules_dir}/python_srw.lua ] ; then + module load python_srw || print_err_msg_exit "\ + Loading SRW common python module failed. Expected python_srw.lua + in the modules directory here: + modules_dir = \"${default_modules_dir}\"" +fi + +# Modules that use conda and need an environment activated will set the +# SRW_ENV variable to the name of the environment to be activated. That +# must be done within the script, and not inside the module. Do that +# now. +if [ -n "${SRW_ENV:-}" ] ; then + set +u + conda deactivate + conda activate ${SRW_ENV} + set -u +fi + +# Source the necessary blocks of the experiment config YAML +for sect in platform workflow ; do + source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} +done + if [ "${machine}" != "wcoss2" ]; then module load "${BUILD_MOD_FN}" || print_err_msg_exit "\ Loading of platform- and compiler-specific module file (BUILD_MOD_FN) @@ -116,26 +155,15 @@ fi # #----------------------------------------------------------------------- # -# Set the directory (modules_dir) in which the module files for the va- -# rious workflow tasks are located. Also, set the name of the module -# file for the specified task. -# -# A module file is a file whose first line is the "magic cookie" string -# '#%Module'. It is interpreted by the "module load ..." command. It -# sets environment variables (including prepending/appending to paths) -# and loads modules. 
-# -# The UFS SRW App repository contains module files for the -# workflow tasks in the template rocoto XML file for the FV3-LAM work- -# flow that need modules not loaded in the BUILD_MOD_FN above. +# Set the directory for the modulefiles included with SRW and the +# specific module for the requested task. # # The full path to a module file for a given task is # # $HOMEdir/modulefiles/$machine/${task_name}.local # -# where HOMEdir is the base directory of the workflow, machine is the -# name of the machine that we're running on (in lowercase), and task_- -# name is the name of the current task (an input to this script). +# where HOMEdir is the SRW clone, machine is the name of the platform +# being used, and task_name is the current task to run. # #----------------------------------------------------------------------- # @@ -154,10 +182,10 @@ Loading modules for task \"${task_name}\" ..." module use "${modules_dir}" || print_err_msg_exit "\ Call to \"module use\" command failed." -# source version file (run) only if it is specified in versions directory -VERSION_FILE="${HOMEdir}/versions/${RUN_VER_FN}" -if [ -f ${VERSION_FILE} ]; then - . ${VERSION_FILE} +# source version file only if it exists in the versions directory +version_file="${HOMEdir}/versions/${RUN_VER_FN}" +if [ -f ${version_file} ]; then + source ${version_file} fi # # Load the .local module file if available for the given task @@ -170,20 +198,11 @@ specified task (task_name) failed: task_name = \"${task_name}\" modulefile_local = \"${modulefile_local}\" modules_dir = \"${modules_dir}\"" -elif [ -f ${default_modules_dir}/python_srw.lua ] ; then - module load python_srw || print_err_msg_exit "\ - Loading SRW common python module failed. Expected python_srw.lua - in the modules directory here: - modules_dir = \"${default_modules_dir}\"" fi - module list -# Modules that use conda and need an environment activated will set the -# SRW_ENV variable to the name of the environment to be activated. That -# must be done within the script, and not inside the module. Do that -# now. - +# Reactivate the workflow environment to ensure the correct Python +# environment is available first in the environment. if [ -n "${SRW_ENV:-}" ] ; then set +u conda deactivate @@ -204,11 +223,7 @@ Launching J-job (jjob_fp) for task \"${task_name}\" ... jjob_fp = \"${jjob_fp}\" " -if [ "${WORKFLOW_MANAGER}" = "ecflow" ]; then - /bin/bash "${jjob_fp}" -else - exec "${jjob_fp}" -fi +source "${jjob_fp}" # #----------------------------------------------------------------------- diff --git a/ush/load_modules_wflow.sh b/ush/load_modules_wflow.sh index cf33a43f3f..d770d7c2d9 100755 --- a/ush/load_modules_wflow.sh +++ b/ush/load_modules_wflow.sh @@ -62,12 +62,7 @@ task failed: $has_mu && set +u if [ ! 
-z $(command -v conda) ]; then -# Gaea-C5 special case missing jinja2 - if [ "${machine}" == "gaea-c5" ]; then - conda activate workflow_tools - else - conda activate srw_app - fi + conda activate srw_app fi $has_mu && set -u diff --git a/ush/machine/derecho.yaml b/ush/machine/derecho.yaml index 511ccc2784..8bc768732f 100644 --- a/ush/machine/derecho.yaml +++ b/ush/machine/derecho.yaml @@ -15,8 +15,8 @@ platform: RUN_CMD_PRDGEN: mpiexec -n $nprocs RUN_CMD_SERIAL: time RUN_CMD_UTILS: mpiexec -n $nprocs - RUN_CMD_NEXUS: mpiexec -n $nprocs - RUN_CMD_AQMLBC: mpiexec -n ${NUMTS} + RUN_CMD_NEXUS: /opt/cray/pe/pals/1.2.11/bin/mpiexec -n $nprocs + RUN_CMD_AQMLBC: /opt/cray/pe/pals/1.2.11/bin/mpiexec -n ${numts} PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' TEST_EXTRN_MDL_SOURCE_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data TEST_AQM_INPUT_BASEDIR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/aqm_data @@ -31,6 +31,8 @@ platform: FIXorg: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_orog FIXsfc: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/fix/fix_sfc_climo FIXshp: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/NaturalEarth + FIXaqm: /glade/work/chanhooj/SRW-AQM_DATA/fix_aqm + FIXemis: /glade/work/chanhooj/SRW-AQM_DATA/fix_emis EXTRN_MDL_DATA_STORES: aws data: ics_lbcs: @@ -42,3 +44,7 @@ data: HRRR: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh} RAP: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} GSMGFS: /glade/work/epicufsrt/contrib/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh} +cpl_aqm_parm: + COMINfire_default: /glade/work/chanhooj/SRW-AQM_DATA/aqm_data/RAVE_fire + COMINgefs_default: /glade/work/chanhooj/SRW-AQM_DATA/aqm_data/GEFS_DATA + NEXUS_GFS_SFC_DIR: /glade/work/chanhooj/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA diff --git a/ush/machine/gaea-c5.yaml b/ush/machine/gaea-c5.yaml deleted file mode 100644 index 1f6f115495..0000000000 --- a/ush/machine/gaea-c5.yaml +++ /dev/null @@ -1,55 +0,0 @@ -platform: - WORKFLOW_MANAGER: rocoto - NCORES_PER_NODE: 128 - SCHED: slurm - TEST_CCPA_OBS_DIR: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/obs_data/ccpa/proc - TEST_MRMS_OBS_DIR: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/obs_data/mrms/proc - TEST_NDAS_OBS_DIR: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/obs_data/ndas/proc - TEST_NOHRSC_OBS_DIR: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/obs_data/nohrsc/proc - DOMAIN_PREGEN_BASEDIR: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/FV3LAM_pregen - QUEUE_DEFAULT: normal - QUEUE_FCST: normal - QUEUE_HPSS: normal - REMOVE_MEMORY: True - PARTITION_HPSS: eslogin_c5 - RUN_CMD_FCST: srun --export=ALL -n ${PE_MEMBER01} - RUN_CMD_POST: srun --export=ALL -n $nprocs - RUN_CMD_PRDGEN: srun --export=ALL -n $nprocs - RUN_CMD_SERIAL: time - RUN_CMD_UTILS: srun --export=ALL -n $nprocs - SCHED_NATIVE_CMD: --clusters=c5 --partition=batch --export=NONE - SCHED_NATIVE_CMD_HPSS: --clusters=es --partition=eslogin_c5 --export=NONE - PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' - TEST_EXTRN_MDL_SOURCE_BASEDIR: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data - TEST_PREGEN_BASEDIR: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/FV3LAM_pregen - TEST_ALT_EXTRN_MDL_SYSBASEDIR_ICS: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir - TEST_ALT_EXTRN_MDL_SYSBASEDIR_LBCS: 
/lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir - TEST_VX_FCST_INPUT_BASEDIR: '{{ "/lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/output_data/fcst_" }}{{ "ens" if (global.NUM_ENS_MEMBERS > 0) else "det" }}{{ "/{{workflow.PREDEF_GRID_NAME}}" }}{% raw %}{% endraw %}' - FIXaer: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/fix/fix_aer - FIXgsi: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/fix/fix_gsi - FIXgsm: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/fix/fix_am - FIXlut: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/fix/fix_lut - FIXorg: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/fix/fix_orog - FIXsfc: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/fix/fix_sfc_climo - FIXshp: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/NaturalEarth - EXTRN_MDL_DATA_STORES: aws -data: - ics_lbcs: - FV3GFS: - nemsio: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/FV3GFS/nemsio/${yyyymmdd}${hh} - grib2: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/FV3GFS/grib2/${yyyymmdd}${hh} - netcdf: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/FV3GFS/netcdf/${yyyymmdd}${hh} - RAP: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} - HRRR: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh} - RAP: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} - GSMGFS: /lustre/f2/dev/role.epic/contrib/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh} -rocoto: - tasks: - metatask_run_ensemble: - task_run_fcst_mem#mem#: - cores: '{{ task_run_fcst.PE_MEMBER01 // 1 }}' - native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' - nodes: - nnodes: - nodesize: - ppn: diff --git a/ush/machine/gaea.yaml b/ush/machine/gaea.yaml new file mode 100644 index 0000000000..1ec2ded2ef --- /dev/null +++ b/ush/machine/gaea.yaml @@ -0,0 +1,55 @@ +platform: + WORKFLOW_MANAGER: rocoto + NCORES_PER_NODE: 128 + SCHED: slurm + TEST_CCPA_OBS_DIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/obs_data/ccpa/proc + TEST_MRMS_OBS_DIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/obs_data/mrms/proc + TEST_NDAS_OBS_DIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/obs_data/ndas/proc + TEST_NOHRSC_OBS_DIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/obs_data/nohrsc/proc + DOMAIN_PREGEN_BASEDIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/FV3LAM_pregen + QUEUE_DEFAULT: normal + QUEUE_FCST: normal + QUEUE_HPSS: normal + REMOVE_MEMORY: True + PARTITION_HPSS: eslogin_c5 + RUN_CMD_FCST: srun --export=ALL -n ${PE_MEMBER01} + RUN_CMD_POST: srun --export=ALL -n $nprocs + RUN_CMD_PRDGEN: srun --export=ALL -n $nprocs + RUN_CMD_SERIAL: time + RUN_CMD_UTILS: srun --export=ALL -n $nprocs + SCHED_NATIVE_CMD: --clusters=c5 --partition=batch --export=NONE + SCHED_NATIVE_CMD_HPSS: --clusters=es --partition=eslogin_c5 --export=NONE + PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' + TEST_EXTRN_MDL_SOURCE_BASEDIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data + TEST_PREGEN_BASEDIR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/FV3LAM_pregen + TEST_ALT_EXTRN_MDL_SYSBASEDIR_ICS: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir + TEST_ALT_EXTRN_MDL_SYSBASEDIR_LBCS: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/dummy_FV3GFS_sys_dir + TEST_VX_FCST_INPUT_BASEDIR: '{{ 
"/gpfs/f5/epic/world-shared/UFS_SRW_data/develop/output_data/fcst_" }}{{ "ens" if (global.NUM_ENS_MEMBERS > 0) else "det" }}{{ "/{{workflow.PREDEF_GRID_NAME}}" }}{% raw %}{% endraw %}' + FIXaer: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/fix/fix_aer + FIXgsi: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/fix/fix_gsi + FIXgsm: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/fix/fix_am + FIXlut: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/fix/fix_lut + FIXorg: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/fix/fix_orog + FIXsfc: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/fix/fix_sfc_climo + FIXshp: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/NaturalEarth + EXTRN_MDL_DATA_STORES: aws +data: + ics_lbcs: + FV3GFS: + nemsio: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/FV3GFS/nemsio/${yyyymmdd}${hh} + grib2: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/FV3GFS/grib2/${yyyymmdd}${hh} + netcdf: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/FV3GFS/netcdf/${yyyymmdd}${hh} + RAP: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} + HRRR: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh} + RAP: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} + GSMGFS: /gpfs/f5/epic/world-shared/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh} +rocoto: + tasks: + metatask_run_ensemble: + task_run_fcst_mem#mem#: + cores: '{{ task_run_fcst.PE_MEMBER01 // 1 }}' + native: '--cpus-per-task {{ task_run_fcst.OMP_NUM_THREADS_RUN_FCST|int }} --exclusive {{ platform.SCHED_NATIVE_CMD }}' + nodes: + nnodes: + nodesize: + ppn: diff --git a/ush/machine/hera.yaml b/ush/machine/hera.yaml index 8d751ae891..80fbb8fc98 100644 --- a/ush/machine/hera.yaml +++ b/ush/machine/hera.yaml @@ -19,9 +19,9 @@ platform: RUN_CMD_SERIAL: time RUN_CMD_UTILS: srun --export=ALL RUN_CMD_NEXUS: srun -n ${nprocs} --export=ALL - RUN_CMD_AQMLBC: srun --export=ALL -n ${NUMTS} - SCHED_NATIVE_CMD: --export=NONE - SCHED_NATIVE_CMD_HPSS: -n 1 --export=NONE + RUN_CMD_AQMLBC: srun --export=ALL -n ${numts} + SCHED_NATIVE_CMD: "--export=NONE" + SCHED_NATIVE_CMD_HPSS: "-n 1 --export=NONE" PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' TEST_EXTRN_MDL_SOURCE_BASEDIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/input_model_data TEST_AQM_INPUT_BASEDIR: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/aqm_data @@ -35,21 +35,14 @@ platform: FIXorg: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/fix/fix_orog FIXsfc: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/fix/fix_sfc_climo FIXshp: /scratch1/NCEPDEV/nems/role.epic/UFS_SRW_data/develop/NaturalEarth + FIXaqm: /scratch2/NAGAPE/epic/SRW-AQM_DATA/fix_aqm + FIXemis: /scratch1/RDARCH/rda-arl-gpu/Barry.Baker/emissions/nexus EXTRN_MDL_DATA_STORES: hpss aws nomads cpl_aqm_parm: - AQM_CONFIG_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/aqm/epa/data - DCOMINbio_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/aqm/bio - DCOMINdust_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/FENGSHA - DCOMINcanopy_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/canopy - DCOMINfire_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/RAVE_fire - DCOMINchem_lbcs_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/LBCS/AQM_NA13km_AM4_v1 - DCOMINgefs_default: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/GEFS_DATA - DCOMINpt_src_default: /scratch1/RDARCH/rda-arl-gpu/Barry.Baker/emissions/nexus/NEI2016v1/v2023-01-PT - NEXUS_INPUT_DIR: 
/scratch2/NCEPDEV/naqfc/RRFS_CMAQ/emissions/nexus - NEXUS_FIX_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/nexus/fix - NEXUS_GFS_SFC_DIR: /scratch2/NCEPDEV/naqfc/RRFS_CMAQ/GFS_DATA - PT_SRC_BASEDIR: /scratch1/RDARCH/rda-arl-gpu/Barry.Baker/emissions/nexus/NEI2016v1/v2023-01-PT + COMINfire_default: /scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/RAVE_fire + COMINgefs_default: /scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/GEFS_DATA + NEXUS_GFS_SFC_DIR: /scratch2/NAGAPE/epic/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA rocoto: tasks: diff --git a/ush/machine/hercules.yaml b/ush/machine/hercules.yaml index e300cf3d6d..e29801dd49 100644 --- a/ush/machine/hercules.yaml +++ b/ush/machine/hercules.yaml @@ -19,7 +19,7 @@ platform: RUN_CMD_SERIAL: time RUN_CMD_UTILS: srun --export=ALL -n $nprocs RUN_CMD_NEXUS: srun --export=ALL - RUN_CMD_AQMLBC: srun --export=ALL -n ${NUMTS} + RUN_CMD_AQMLBC: srun --export=ALL -n ${numts} SCHED_NATIVE_CMD: --export=NONE PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' TEST_EXTRN_MDL_SOURCE_BASEDIR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data @@ -33,6 +33,8 @@ platform: FIXorg: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/fix/fix_orog FIXsfc: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/fix/fix_sfc_climo FIXshp: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/NaturalEarth + FIXaqm: /work/noaa/epic/SRW-AQM_DATA/fix_aqm + FIXemis: /work/noaa/epic/SRW-AQM_DATA/fix_emis EXTRN_MDL_DATA_STORES: aws data: ics_lbcs: @@ -44,3 +46,8 @@ data: HRRR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh} RAP: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} GSMGFS: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh} + +cpl_aqm_parm: + COMINfire_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/RAVE_fire + COMINgefs_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GEFS_DATA + NEXUS_GFS_SFC_DIR: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA diff --git a/ush/machine/orion.yaml b/ush/machine/orion.yaml index 3f74905c8f..3f756e2836 100644 --- a/ush/machine/orion.yaml +++ b/ush/machine/orion.yaml @@ -19,7 +19,7 @@ platform: RUN_CMD_SERIAL: time RUN_CMD_UTILS: srun --export=ALL RUN_CMD_NEXUS: srun --export=ALL - RUN_CMD_AQMLBC: srun --export=ALL -n ${NUMTS} + RUN_CMD_AQMLBC: srun --export=ALL -n ${numts} SCHED_NATIVE_CMD: --export=NONE PRE_TASK_CMDS: '{ ulimit -s unlimited; ulimit -a; }' TEST_EXTRN_MDL_SOURCE_BASEDIR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data @@ -32,6 +32,8 @@ platform: FIXorg: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/fix/fix_orog FIXsfc: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/fix/fix_sfc_climo FIXshp: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/NaturalEarth + FIXaqm: /work/noaa/epic/SRW-AQM_DATA/fix_aqm + FIXemis: /work/noaa/epic/SRW-AQM_DATA/fix_emis EXTRN_MDL_DATA_STORES: aws nomads data: ics_lbcs: @@ -43,3 +45,8 @@ data: HRRR: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/HRRR/${yyyymmdd}${hh} RAP: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/RAP/${yyyymmdd}${hh} GSMGFS: /work/noaa/epic/role-epic/contrib/UFS_SRW_data/develop/input_model_data/GSMGFS/${yyyymmdd}${hh} + +cpl_aqm_parm: + COMINfire_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/RAVE_fire + COMINgefs_default: /work/noaa/epic/SRW-AQM_DATA/aqm_data/GEFS_DATA + NEXUS_GFS_SFC_DIR: 
/work/noaa/epic/SRW-AQM_DATA/aqm_data/GFS_SFC_DATA diff --git a/ush/machine/wcoss2.yaml b/ush/machine/wcoss2.yaml index 53733959bb..b8c3625dff 100644 --- a/ush/machine/wcoss2.yaml +++ b/ush/machine/wcoss2.yaml @@ -41,21 +41,6 @@ data: RAP: compath.py ${envir}/rap/${rap_ver}/rap.${PDYext} NAM: compath.py ${envir}/nam/${nam_ver}/nam.${PDYext} HRRR: compath.py ${envir}/hrrr/${hrrr_ver}/hrrr.${PDYext}/conus -cpl_aqm_parm: - AQM_CONFIG_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/aqm/epa/data - DCOMINbio_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/aqm/bio - DCOMINdust_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/FENGSHA - DCOMINcanopy_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/canopy - DCOMINfire_default: /lfs/h1/ops/dev/dcom - DCOMINchem_lbcs_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/LBCS/AQM_NA13km_AM4_v1 - DCOMINgefs_default: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/GEFS_DATA - DCOMINpt_src_default: /lfs/h2/emc/physics/noscrub/Youhua.Tang/nei2016v1-pt/v2023-01-PT - DCOMINairnow_default: /lfs/h1/ops/prod/dcom - COMINbicor: /lfs/h2/emc/physics/noscrub/jianping.huang/Bias_correction/aqmv7.0.81 - COMOUTbicor: /lfs/h2/emc/physics/noscrub/jianping.huang/Bias_correction/aqmv7.0.81 - NEXUS_INPUT_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/nexus_emissions - NEXUS_FIX_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/nexus/fix - NEXUS_GFS_SFC_DIR: /lfs/h2/emc/lam/noscrub/RRFS_CMAQ/GFS_DATA rocoto: tasks: diff --git a/ush/set_FV3nml_ens_stoch_seeds.py b/ush/set_fv3nml_ens_stoch_seeds.py similarity index 62% rename from ush/set_FV3nml_ens_stoch_seeds.py rename to ush/set_fv3nml_ens_stoch_seeds.py index c8a90e2797..0b9b186210 100644 --- a/ush/set_FV3nml_ens_stoch_seeds.py +++ b/ush/set_fv3nml_ens_stoch_seeds.py @@ -1,32 +1,27 @@ #!/usr/bin/env python3 +""" +Updates stochastic physics parameters in the namelist based on user configuration settings. +""" + +import argparse +import datetime as dt import os import sys -import argparse from textwrap import dedent -from datetime import datetime + +from uwtools.api.config import get_nml_config, realize from python_utils import ( + cfg_to_yaml_str, + import_vars, + load_yaml_config, print_input_args, print_info_msg, - print_err_msg_exit, - date_to_str, - mkdir_vrfy, - cp_vrfy, - cd_vrfy, - str_to_type, - import_vars, - set_env_var, - define_macos_utilities, - cfg_to_yaml_str, - load_shell_config, - flatten_dict, ) -from set_namelist import set_namelist - -def set_FV3nml_ens_stoch_seeds(cdate): +def set_fv3nml_ens_stoch_seeds(cdate, expt_config): """ This function, for an ensemble-enabled experiment (i.e. for an experiment for which the workflow configuration variable @@ -39,15 +34,20 @@ def set_FV3nml_ens_stoch_seeds(cdate): called as part of the TN_RUN_FCST task. 
Args: - cdate + cdate the cycle date + expt_config the in-memory dict representing the experiment configuration Returns: None """ print_input_args(locals()) - # import all environment variables - import_vars() + fv3_nml_fn = expt_config["workflow"]["FV3_NML_FN"] + verbose = expt_config["workflow"]["VERBOSE"] + + # set variables important to this function from the experiment definition + import_vars(dictionary=expt_config["global"]) + # pylint: disable=undefined-variable # # ----------------------------------------------------------------------- @@ -57,9 +57,9 @@ def set_FV3nml_ens_stoch_seeds(cdate): # # ----------------------------------------------------------------------- # - fv3_nml_ensmem_fp = f"{os.getcwd()}{os.sep}{FV3_NML_FN}" + fv3_nml_ensmem_fp = f"{os.getcwd()}{os.sep}{fv3_nml_fn}" - ensmem_num = int(ENSMEM_INDX) + ensmem_num = int(os.environ["ENSMEM_INDX"]) cdate_i = int(cdate.strftime("%Y%m%d%H")) @@ -95,49 +95,39 @@ def set_FV3nml_ens_stoch_seeds(cdate): settings["nam_sfcperts"] = {"iseed_lndp": [iseed_lsm_spp]} - settings_str = cfg_to_yaml_str(settings) - print_info_msg( dedent( f""" The variable 'settings' specifying seeds in '{fv3_nml_ensmem_fp}' has been set as follows: - settings =\n\n""" - ) - + settings_str, - verbose=VERBOSE, - ) + settings =\n\n - try: - set_namelist( - ["-q", "-n", fv3_nml_ensmem_fp, "-u", settings_str, "-o", fv3_nml_ensmem_fp] - ) - except: - print_err_msg_exit( - dedent( - f""" - Call to python script set_namelist.py to set the variables in the FV3 - namelist file that specify the paths to the surface climatology files - failed. Parameters passed to this script are: - Full path to base namelist file: - FV3_NML_FP = '{FV3_NML_FP}' - Full path to output namelist file: - fv3_nml_ensmem_fp = '{fv3_nml_ensmem_fp}' - Namelist settings specified on command line (these have highest precedence):\n - settings =\n\n""" - ) - + settings_str + {cfg_to_yaml_str(settings)}""" + ), + verbose=verbose, + ) + realize( + input_config=fv3_nml_ensmem_fp, + input_format="nml", + output_file=fv3_nml_ensmem_fp, + output_format="nml", + update_config=get_nml_config(settings), ) - def parse_args(argv): """Parse command line arguments""" parser = argparse.ArgumentParser( description="Creates stochastic seeds for an ensemble experiment." ) - parser.add_argument("-c", "--cdate", dest="cdate", required=True, help="Date.") + parser.add_argument( + "-c", "--cdate", + dest="cdate", + required=True, + type=lambda d: dt.datetime.strptime(d, '%Y%m%d%H'), + help="Date.", + ) parser.add_argument( "-p", @@ -152,7 +142,5 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) - cfg = flatten_dict(cfg) - import_vars(dictionary=cfg) - set_FV3nml_ens_stoch_seeds(str_to_type(args.cdate)) + cfg = load_yaml_config(args.path_to_defns) + set_fv3nml_ens_stoch_seeds(args.cdate, cfg) diff --git a/ush/set_FV3nml_sfc_climo_filenames.py b/ush/set_fv3nml_sfc_climo_filenames.py similarity index 51% rename from ush/set_FV3nml_sfc_climo_filenames.py rename to ush/set_fv3nml_sfc_climo_filenames.py index a1ffaa57ef..7251a5b0e6 100644 --- a/ush/set_FV3nml_sfc_climo_filenames.py +++ b/ush/set_fv3nml_sfc_climo_filenames.py @@ -1,33 +1,42 @@ #!/usr/bin/env python3 +""" +Update filenames for surface climatology files in the namelist.
+""" + +import argparse import os +import re import sys -import argparse from textwrap import dedent +from uwtools.api.config import get_nml_config, get_yaml_config, realize + from python_utils import ( - print_input_args, - print_info_msg, - print_err_msg_exit, + cfg_to_yaml_str, check_var_valid_value, - mv_vrfy, - mkdir_vrfy, - cp_vrfy, - rm_vrfy, - import_vars, - set_env_var, - load_config_file, - load_shell_config, flatten_dict, - define_macos_utilities, - find_pattern_in_str, - cfg_to_yaml_str, + import_vars, + load_yaml_config, + print_info_msg, ) -from set_namelist import set_namelist +VERBOSE = os.environ.get("VERBOSE", "true") + +NEEDED_VARS = [ + "CRES", + "DO_ENSEMBLE", + "EXPTDIR", + "FIXlam", + "FV3_NML_FP", + "PARMdir", + "RUN_ENVIR", + ] + +# pylint: disable=undefined-variable -def set_FV3nml_sfc_climo_filenames(debug=False): +def set_fv3nml_sfc_climo_filenames(config, debug=False): """ This function sets the values of the variables in the forecast model's namelist file that specify the paths to the surface @@ -43,13 +52,9 @@ def set_FV3nml_sfc_climo_filenames(debug=False): None """ - # import all environment variables - import_vars() + import_vars(dictionary=config, env_vars=NEEDED_VARS) - # fixed file mapping variables - fixed_cfg = load_config_file(os.path.join(PARMdir, "fixed_files_mapping.yaml")) - IMPORTS = ["SFC_CLIMO_FIELDS", "FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING"] - import_vars(dictionary=flatten_dict(fixed_cfg), env_vars=IMPORTS) + fixed_cfg = get_yaml_config(os.path.join(PARMdir, "fixed_files_mapping.yaml"))["fixed_files"] # The regular expression regex_search set below will be used to extract # from the elements of the array FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING @@ -68,18 +73,16 @@ def set_FV3nml_sfc_climo_filenames(debug=False): dummy_run_dir += os.sep + "any_ensmem" namsfc_dict = {} - for mapping in FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING: - tup = find_pattern_in_str(regex_search, mapping) - nml_var_name = tup[0] - sfc_climo_field_name = tup[1] + for mapping in fixed_cfg["FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING"]: + nml_var_name, sfc_climo_field_name = re.search(regex_search, mapping).groups() - check_var_valid_value(sfc_climo_field_name, SFC_CLIMO_FIELDS) + check_var_valid_value(sfc_climo_field_name, fixed_cfg["SFC_CLIMO_FIELDS"]) - fp = os.path.join(FIXlam, f"{CRES}.{sfc_climo_field_name}.{suffix}") + file_path = os.path.join(FIXlam, f"{CRES}.{sfc_climo_field_name}.{suffix}") if RUN_ENVIR != "nco": - fp = os.path.relpath(os.path.realpath(fp), start=dummy_run_dir) + file_path = os.path.relpath(os.path.realpath(file_path), start=dummy_run_dir) - namsfc_dict[nml_var_name] = fp + namsfc_dict[nml_var_name] = file_path settings["namsfc_dict"] = namsfc_dict settings_str = cfg_to_yaml_str(settings) @@ -89,40 +92,22 @@ def set_FV3nml_sfc_climo_filenames(debug=False): f""" The variable 'settings' specifying values of the namelist variables has been set as follows:\n - settings =\n\n""" - ) - + settings_str, + settings = + + {settings_str} + """ + ), verbose=debug, ) - # Rename the FV3 namelist and call set_namelist - fv3_nml_base_fp = f"{FV3_NML_FP}.base" - mv_vrfy(f"{FV3_NML_FP} {fv3_nml_base_fp}") - - try: - set_namelist( - ["-q", "-n", fv3_nml_base_fp, "-u", settings_str, "-o", FV3_NML_FP] - ) - except: - print_err_msg_exit( - dedent( - f""" - Call to python script set_namelist.py to set the variables in the FV3 - namelist file that specify the paths to the surface climatology files - failed. 
Parameters passed to this script are: - Full path to base namelist file: - fv3_nml_base_fp = '{fv3_nml_base_fp}' - Full path to output namelist file: - FV3_NML_FP = '{FV3_NML_FP}' - Namelist settings specified on command line (these have highest precedence):\n - settings =\n\n""" - ) - + settings_str + realize( + input_config=FV3_NML_FP, + input_format="nml", + output_file=FV3_NML_FP, + output_format="nml", + update_config=get_nml_config(settings), ) - rm_vrfy(f"{fv3_nml_base_fp}") - - def parse_args(argv): """Parse command line arguments""" parser = argparse.ArgumentParser(description="Set surface climatology fields.") @@ -142,7 +127,6 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) + cfg = load_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) - import_vars(dictionary=cfg) - set_FV3nml_sfc_climo_filenames(args.debug) + set_fv3nml_sfc_climo_filenames(cfg, args.debug) diff --git a/ush/set_namelist.py b/ush/set_namelist.py deleted file mode 100755 index e578d3201f..0000000000 --- a/ush/set_namelist.py +++ /dev/null @@ -1,355 +0,0 @@ -#!/usr/bin/env python3 - -""" -This utility updates a Fortran namelist file using the f90nml package. The -settings that are modified are supplied via command line YAML-formatted string -and/or YAML configuration files. - -Additionally, the tool can be used to create a YAML file from an input namelist, -or the difference between two namelists. - -The user configuration file should contain a heirarchy that follows the -heirarchy for the Fortran namelist. An example of modifying an FV3 namelist: - - Configuration file contains: - - fv_core_nml: - k_split: 4 - n_split: 5 - - gfs_physics_nml: - do_sppt: True - -The output namelist will differ from the input namelist by only these three -settings. If one of these sections and/or variables did not previously exist, it -will be automatically created. It is up to the user to ensure that configuration -settings are provided under the correct sections and variable names. - -The optional base configuration file (provided via the -c command line argument) -contains the known set of configurations used and supported by the community, if -using the one provided in parm/FV3.input.yml. If maintaining this file -for a different set of configurations, ensure that the heirarchy is such that it -names the configuration at the top level (section), and the subsequent sections -match those in the F90 namelist that will be updated. - -Examples - - To show help options: - - set_namelist.py -h - - To produce a namelist (fv3_expt.nml) by specifying a physics package: - - set_namelist.py -n ../parm/input.nml.FV3 -c ../parm/FV3.input.yml FV3_HRRR - -o fv3_expt.nml - - To produce a YAML file (fv3_namelist.yml) from a user namelist: - - set_namelist.py -i my_namelist.nml -o fv3_namelist.nml -t yaml - - To produce a YAML file (fv3_my_namelist.yml) with differences from base nml: - - set_namelist.py -n ../parm/input.nml.FV3 -i my_namelist.nml -t yaml - -o fv3_my_namelist.nml - -Expected behavior: - - - A Fortran namelist that contains only user-defined settings will be - generated if no input namelist is provided. - - An unmodified copy of an input namelist will be generated in the - designated output location if no user-settings are provided. - - Command-line-entered settings over-ride settings in YAML configuration - file. - - Given a user namelist, the script can dump a YAML file. 
- - Given a user namelist and a base namelist, the script can dump the - difference in the two to a YAML file that can be included as a section - in the supported configs. -""" - -import argparse -import collections -import os -import sys - -import f90nml -import yaml - - -def config_exists(arg): - - """ - Checks whether the config file exists and if it contains the input - section. Returns the arg as provided if checks are passed. - """ - - # Agument is expected to be a 2-item list of file name and internal section - # name. - file_name = arg[0] - section_name = arg[1] - - file_exists(file_name) - - # Load the YAML file into a dictionary - with open(file_name, "r") as fn: - cfg = yaml.load(fn, Loader=yaml.Loader) - - # Grab only the section that is specified by the user - try: - cfg = cfg[section_name] - except KeyError: - msg = f"Section {section_name} does not exist in top level of {file_name}" - raise argparse.ArgumentTypeError(msg) - - return [cfg, section_name] - - -def file_exists(arg): - - """Check for existence of file""" - - if not os.path.exists(arg): - msg = f"{arg} does not exist!" - raise argparse.ArgumentTypeError(msg) - - return arg - - -def load_config(arg): - - """ - Check to ensure that the provided config file exists. If it does, load it - with YAML's safe loader and return the resulting dict. - """ - - return yaml.safe_load(arg) - - -def path_ok(arg): - - """ - Check whether the path to the file exists, and is writeable. Return the path - if it passes all checks, otherwise raise an error. - """ - - # Get the absolute path provided by arg - dir_name = os.path.abspath(os.path.dirname(arg)) - - # Ensure the arg path exists, and is writable. Raise error if not. - if os.path.lexists(dir_name) and os.access(dir_name, os.W_OK): - return arg - - msg = f"{arg} is not a writable path!" - raise argparse.ArgumentTypeError(msg) - - -def parse_args(argv): - - """ - Function maintains the arguments accepted by this script. Please see - Python's argparse documenation for more information about settings of each - argument. - """ - - parser = argparse.ArgumentParser( - description="Update a Fortran namelist with user-defined settings." - ) - - # Required - parser.add_argument( - "-o", - "--outfile", - help="Required: Full path to output file. This is a \ - namelist by default.", - required=True, - type=path_ok, - ) - - # Optional - parser.add_argument( - "-c", - "--config", - help="Full path to a YAML config file containing multiple \ - configurations, and the top-level section to use. Optional.", - metavar=("[FILE,", "SECTION]"), - nargs=2, - ) - parser.add_argument( - "-i", - "--input_nml", - help="Path to a user namelist. Use with -n and \ - -t yaml to get a YAML file to use with workflow.", - type=file_exists, - ) - parser.add_argument( - "-n", - "--basenml", - dest="nml", - help="Full path to the input Fortran namelist. Optional.", - type=file_exists, - ) - parser.add_argument( - "-t", - "--type", - choices=["nml", "yaml"], - default="nml", - help="Output file type.", - ) - parser.add_argument( - "-u", - "--user_config", - help="Command-line user config options in YAML-formatted \ - string. These options will override any provided in an \ - input file. 
Optional.", - metavar="YAML STRING", - type=load_config, - ) - - # Flags - parser.add_argument( - "-q", - "--quiet", - action="store_true", - help="If provided, suppress all output.", - ) - return parser.parse_args(argv) - - -def dict_diff(dict1, dict2): - - """ - Produces a dictionary of how dict2 differs from dict1 - """ - - diffs = {} - - # Loop through dict1 sections and key/value pairs - for sect, items in dict1.items(): - for key, val in items.items(): - - # If dict 2 has a different value, record the dict2 value - if val != dict2.get(sect, {}).get(key, ""): - if not diffs.get(sect): - diffs[sect] = {} - diffs[sect][key] = dict2.get(sect, {}).get(key) - - # Loop through dict2 sections and key/value pairs to catch any settings that - # may be present in the 2nd dict that weren't in the first. - for sect, items in dict2.items(): - for key, val in items.items(): - - # If dict1 has a diffent value than dict2, record the dict2 value - if val != dict1.get(sect, {}).get(key, ""): - - # Check to make sure it hasn't already been recorded - if diffs.get(sect, {}).get(key, "DNE") == "DNE": - if not diffs.get(sect): - diffs[sect] = {} - diffs[sect][key] = val - return diffs - - -def to_dict(odict): - - """Recursively convert OrderedDict to Python dict.""" - - if not isinstance(odict, collections.OrderedDict): - return odict - - ret = dict(odict) - for key, value in ret.items(): - if isinstance(value, collections.OrderedDict): - ret[key] = to_dict(value) - return ret - - -def update_dict(dest, newdict, quiet=False): - - """ - Overwrites all values in dest dictionary with values from newdict. Turn off - print statements with queit=True. - - Input: - - dest A dict that is to be updated. - newdict A dict containing sections and keys corresponding to - those in dest and potentially additional ones, that will be used to - update the dest dict. - quiet An optional boolean flag to turn off output. - - Output: - - None - - Result: - - The dest dict is updated in place. - """ - - for sect, values in newdict: - # If section is set to None, remove all contents from namelist - if values is None: - dest[sect] = {} - else: - for key, value in values.items(): - if not quiet: - print(f"Setting {sect}.{key} = {value}") - - # Remove key from dict if config is set to None - if value is None: - _ = dest[sect].pop(key, None) - else: - - try: - dest[sect][key] = value - except KeyError: - # Namelist section did not exist. Create it and update the value. 
- dest[sect] = {} - dest[sect][key] = value - - -def set_namelist(argv): - - """Using input command line arguments (cla), update a Fortran namelist file.""" - - # parse argumetns - cla = parse_args(argv) - if cla.config: - cla.config, _ = config_exists(cla.config) - - # Load base namelist into dict - nml = f90nml.Namelist() - if cla.nml is not None: - nml = f90nml.read(cla.nml) - - # Update namelist settings (nml) with config file settings (cfg) - cfg = {} - if cla.config is not None: - cfg = cla.config - update_dict(nml, cfg.items(), quiet=cla.quiet) - - # Update nml, overriding YAML if needed, with any command-line entries - if cla.user_config: - update_dict(nml, cla.user_config.items(), quiet=cla.quiet) - - # Write the resulting file - with open(cla.outfile, "w") as fn: - if cla.type == "nml": - nml.write(fn, sort=True) - - if cla.type == "yaml": - if cla.input_nml: - input_nml = f90nml.read(cla.input_nml) - - # Determine how input_nml differs from the configured namelist - diff = dict_diff(nml, input_nml) - - # Write diffs to YAML file - yaml.dump(diff, fn) - - else: - # Write the namelist to YAML file - yaml.dump(to_dict(nml.todict()), fn) - - -if __name__ == "__main__": - set_namelist(sys.argv[1:]) diff --git a/ush/set_ozone_param.py b/ush/set_ozone_param.py deleted file mode 100644 index 14a57b3fe9..0000000000 --- a/ush/set_ozone_param.py +++ /dev/null @@ -1,130 +0,0 @@ -#!/usr/bin/env python3 - -import copy -import os -from textwrap import dedent - -from python_utils import ( - log_info, - list_to_str, - print_input_args, - load_xml_file, - has_tag_with_value, - find_pattern_in_str, -) - - -def set_ozone_param(ccpp_phys_suite_fp, link_mappings): - """Function that does the following: - (1) Determines the ozone parameterization being used by checking in the - CCPP physics suite XML. - - (2) Sets the name of the global ozone production/loss file in the FIXgsm - FIXgsm system directory to copy to the experiment's FIXam directory. - - (3) Updates the symlink for the ozone file provided in link_mappings - list to include the name of global ozone production/loss file. - - Args: - ccpp_phys_suite_fp: full path to CCPP physics suite - link_mappings: list of mappings between symlinks and their - target files for this experiment - Returns: - ozone_param: a string - fixgsm_ozone_fn: a path to a fix file that should be used with - this experiment - ozone_link_mappings: a list of mappings for the files needed for - this experiment - - """ - - print_input_args(locals()) - - # - # ----------------------------------------------------------------------- - # - # Get the name of the ozone parameterization being used. There are two - # possible ozone parameterizations: - # - # (1) A parameterization developed/published in 2015. Here, we refer to - # this as the 2015 parameterization. If this is being used, then we - # set the variable ozone_param to the string "ozphys_2015". - # - # (2) A parameterization developed/published sometime after 2015. Here, - # we refer to this as the after-2015 parameterization. If this is - # being used, then we set the variable ozone_param to the string - # "ozphys". - # - # We check the CCPP physics suite definition file (SDF) to determine the - # parameterization being used. If this file contains the line - # - # ozphys_2015 - # - # then the 2015 parameterization is being used. If it instead contains - # the line - # - # ozphys - # - # then the after-2015 parameterization is being used. 
(The SDF should - # contain exactly one of these lines; not both nor neither; we check for - # this.) - # - # ----------------------------------------------------------------------- - # - tree = load_xml_file(ccpp_phys_suite_fp) - ozone_param = "" - if has_tag_with_value(tree, "scheme", "ozphys_2015"): - fixgsm_ozone_fn = "ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77" - ozone_param = "ozphys_2015" - elif has_tag_with_value(tree, "scheme", "ozphys"): - fixgsm_ozone_fn = "global_o3prdlos.f77" - ozone_param = "ozphys" - else: - raise KeyError( - f"Unknown or no ozone parameterization specified in the " - "CCPP physics suite file '{ccpp_phys_suite_fp}'" - ) - # - # ----------------------------------------------------------------------- - # - # Set the element in the array CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING that - # specifies the mapping between the symlink for the ozone production/loss - # file that must be created in each cycle directory and its target in the - # FIXam directory. The name of the symlink is already in the array, but - # the target is not because it depends on the ozone parameterization that - # the physics suite uses. Since we determined the ozone parameterization - # above, we now set the target of the symlink accordingly. - - # - # ----------------------------------------------------------------------- - # - # Set the mapping between the symlink and the target file we just - # found. The link name is already in the list, but the target file - # is not. - # - # ----------------------------------------------------------------------- - # - - ozone_symlink = "global_o3prdlos.f77" - fixgsm_ozone_fn_is_set = False - - ozone_link_mappings = copy.deepcopy(link_mappings) - for i, mapping in enumerate(ozone_link_mappings): - symlink = mapping.split("|")[0] - if symlink.strip() == ozone_symlink: - ozone_link_mappings[i] = f"{symlink}| {fixgsm_ozone_fn}" - fixgsm_ozone_fn_is_set = True - break - - # Make sure the list has been updated - if not fixgsm_ozone_fn_is_set: - - raise Exception( - f""" - Unable to set name of the ozone production/loss file in the FIXgsm directory - in the array that specifies the mapping between the symlinks that need to - be created in the cycle directories and the files in the FIXgsm directory: - fixgsm_ozone_fn_is_set = '{fixgsm_ozone_fn_is_set}'""" - ) - - return ozone_param, fixgsm_ozone_fn, ozone_link_mappings diff --git a/ush/set_vx_fhr_list.sh b/ush/set_vx_fhr_list.sh index 8a1c9735a5..5cefc78365 100644 --- a/ush/set_vx_fhr_list.sh +++ b/ush/set_vx_fhr_list.sh @@ -253,7 +253,7 @@ METplus configuration file. # fhr_list=$( echo "${fhr_list}" | $SED "s/^,//g" ) print_info_msg "$VERBOSE" "\ -Final (i.e. after filtering for missing files) set of foreast hours is +Final (i.e. 
after filtering for missing files) set of forecast hours is (written as a single string): fhr_list = \"${fhr_list}\" " diff --git a/ush/setup.py b/ush/setup.py index 1d574ec18c..335ce229e1 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -10,10 +10,12 @@ from textwrap import dedent import yaml +from uwtools.api.config import get_yaml_config from python_utils import ( log_info, cd_vrfy, + date_to_str, mkdir_vrfy, rm_vrfy, check_var_valid_value, @@ -39,7 +41,6 @@ from set_cycle_dates import set_cycle_dates from set_predef_grid_params import set_predef_grid_params -from set_ozone_param import set_ozone_param from set_gridparams_ESGgrid import set_gridparams_ESGgrid from set_gridparams_GFDLgrid import set_gridparams_GFDLgrid from link_fix import link_fix @@ -1161,49 +1162,15 @@ def get_location(xcs, fmt, expt_cfg): # # ----------------------------------------------------------------------- # - - # These NCO variables need to be set based on the user's specified - # run environment. The default is set in config_defaults for nco. If - # running in community mode, we set these paths to the experiment - # directory. - nco_vars = [ - "opsroot_default", - "comroot_default", - "dataroot_default", - "dcomroot_default", - "comin_basedir", - "comout_basedir", - ] - - nco_config = expt_config["nco"] - if run_envir != "nco": - # Put the variables in config dict. - for nco_var in nco_vars: - nco_config[nco_var.upper()] = exptdir - # Use env variables for NCO variables and create NCO directories workflow_manager = expt_config["platform"].get("WORKFLOW_MANAGER") if run_envir == "nco" and workflow_manager == "rocoto": - for nco_var in nco_vars: - envar = os.environ.get(nco_var) - if envar is not None: - nco_config[nco_var.upper()] = envar - - mkdir_vrfy(f' -p "{nco_config.get("OPSROOT_default")}"') - mkdir_vrfy(f' -p "{nco_config.get("COMROOT_default")}"') - mkdir_vrfy(f' -p "{nco_config.get("DATAROOT_default")}"') - mkdir_vrfy(f' -p "{nco_config.get("DCOMROOT_default")}"') - # Update the rocoto string for the fcst output location if # running an ensemble in nco mode if global_sect["DO_ENSEMBLE"]: rocoto_config["entities"]["FCST_DIR"] = \ - "{{ nco.DATAROOT_default }}/run_fcst_mem#mem#.{{ workflow.WORKFLOW_ID }}_@Y@m@d@H" - - if nco_config["DBNROOT_default"] and workflow_manager == "rocoto": - mkdir_vrfy(f' -p "{nco_config["DBNROOT_default"]}"') + "{{ nco.PTMP }}/{{ nco.envir_default }}/tmp/run_fcst_mem#mem#.{{ workflow.WORKFLOW_ID }}_@Y@m@d@H" - mkdir_vrfy(f' -p "{nco_config.get("LOGBASEDIR_default")}"') # create experiment dir mkdir_vrfy(f' -p "{exptdir}"') @@ -1264,43 +1231,6 @@ def get_location(xcs, fmt, expt_cfg): FIELD_DICT_IN_UWM_FP = '{field_dict_in_uwm_fp}'""" ) - fixed_files = expt_config["fixed_files"] - # Set the appropriate ozone production/loss file paths and symlinks - ozone_param, fixgsm_ozone_fn, ozone_link_mappings = set_ozone_param( - ccpp_phys_suite_in_ccpp_fp, - fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"], - ) - - # Reset the dummy value saved in the last list item to the ozone - # file name - fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"][-1] = fixgsm_ozone_fn - - # Reset the experiment config list with the update list - fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"] = ozone_link_mappings - - log_info( - f""" - The ozone parameter used for this experiment is {ozone_param}. - """ - ) - - log_info( - f""" - The list that sets the mapping between symlinks in the cycle - directory, and the files in the FIXam directory has been updated - to include the ozone production/loss file. 
- """, - verbose=verbose, - ) - - log_info( - f""" - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = {list_to_str(ozone_link_mappings)} - """, - verbose=verbose, - dedent_=False, - ) - # # ----------------------------------------------------------------------- # @@ -1388,6 +1318,8 @@ def dict_find(user_dict, substring): (run_make_ics or run_make_lbcs), } + fixed_files = expt_config["fixed_files"] + prep_tasks = ["GRID", "OROG", "SFC_CLIMO"] res_in_fixlam_filenames = None for prep_task in prep_tasks: @@ -1515,8 +1447,8 @@ def dict_find(user_dict, substring): # If the model ICs or BCs are not from RAP or HRRR, they will not contain aerosol # climatology data needed by the Thompson scheme, so we need to provide a separate file - if (get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RAP"] or - get_extrn_lbcs["EXTRN_MDL_NAME_LBCS"] not in ["HRRR", "RAP"]): + if (get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RRFS", "RAP"] or + get_extrn_lbcs["EXTRN_MDL_NAME_LBCS"] not in ["HRRR", "RRFS", "RAP"]): fixed_files["THOMPSON_FIX_FILES"].append(workflow_config["THOMPSON_MP_CLIMO_FN"]) # Add thompson-specific fix files to CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING and @@ -1569,10 +1501,13 @@ def dict_find(user_dict, substring): yaml.Dumper.ignore_aliases = lambda *args : True yaml.dump(expt_config.get("rocoto"), f, sort_keys=False) - var_defns_cfg = copy.deepcopy(expt_config) + var_defns_cfg = get_yaml_config(config=expt_config) del var_defns_cfg["rocoto"] - with open(global_var_defns_fp, "a") as f: - f.write(cfg_to_shell_str(var_defns_cfg)) + + # Fixup a couple of data types: + for dates in ("DATE_FIRST_CYCL", "DATE_LAST_CYCL"): + var_defns_cfg["workflow"][dates] = date_to_str(var_defns_cfg["workflow"][dates]) + var_defns_cfg.dump(global_var_defns_fp) # diff --git a/ush/source_util_funcs.sh b/ush/source_util_funcs.sh index fa097de34d..9feceaf68e 100644 --- a/ush/source_util_funcs.sh +++ b/ush/source_util_funcs.sh @@ -115,16 +115,6 @@ function source_util_funcs() { # #----------------------------------------------------------------------- # -# Source the file containing functions that execute filesystem commands -# (e.g. "cp", "mv") with verification (i.e. verifying that the commands -# completed successfully). -# -#----------------------------------------------------------------------- -# - . ${bashutils_dir}/filesys_cmds_vrfy.sh -# -#----------------------------------------------------------------------- -# # Source the file containing the function that searches an array for a # specified string. # @@ -230,15 +220,15 @@ function source_util_funcs() { #----------------------------------------------------------------------- # . ${bashutils_dir}/eval_METplus_timestr_tmpl.sh + # #----------------------------------------------------------------------- # -# Source the file containing the function that sources config files. +# Source the file that sources YAML files as if they were bash # #----------------------------------------------------------------------- # - . ${bashutils_dir}/source_config.sh - + . ${bashutils_dir}/source_yaml.sh } source_util_funcs diff --git a/ush/update_input_nml.py b/ush/update_input_nml.py index 0f10c675b2..b85bbacd4a 100644 --- a/ush/update_input_nml.py +++ b/ush/update_input_nml.py @@ -1,62 +1,41 @@ #!/usr/bin/env python3 +""" +Update the model namelist for a variety of different settings. 
+""" + +import argparse import os import sys -import argparse -import logging from textwrap import dedent +from uwtools.api.config import get_nml_config, realize + from python_utils import ( - import_vars, print_input_args, print_info_msg, - print_err_msg_exit, cfg_to_yaml_str, - load_shell_config, - flatten_dict, ) -from set_namelist import set_namelist +VERBOSE = os.environ.get("VERBOSE", "true") - -def update_input_nml(run_dir): +def update_input_nml(namelist, restart, aqm_na_13km): """Update the FV3 input.nml file in the specified run directory Args: - run_dir: run directory + namelist: path to the namelist + restart: should forecast start from restart? + aqm_na_13km: should the 13km AQM config be used? + Returns: Boolean """ print_input_args(locals()) - - # import all environment variables - import_vars() - - # - # ----------------------------------------------------------------------- - # - # Update the FV3 input.nml file in the specified run directory. - # - # ----------------------------------------------------------------------- - # - print_info_msg( - f""" - Updating the FV3 input.nml file in the specified run directory (run_dir): - run_dir = '{run_dir}'""", - verbose=VERBOSE, - ) - # - # ----------------------------------------------------------------------- - # - # Set new values of the specific parameters to be updated. - # - # ----------------------------------------------------------------------- - # settings = {} # For restart run - if args.restart: + if restart: settings["fv_core_nml"] = { "external_ic": False, "make_nh": False, @@ -69,105 +48,68 @@ def update_input_nml(run_dir): settings["gfs_physics_nml"] = { "nstf_name": [2, 0, 0, 0, 0], } - + # For AQM_NA_13km domain for air quality modeling - if args.aqm_na_13km: + if aqm_na_13km: settings["fv_core_nml"] = { "k_split": 1, "n_split": 8, } - settings_str = cfg_to_yaml_str(settings) - print_info_msg( dedent( f""" - The variable 'settings' specifying values to be used in the FV3 'input.nml' - file for restart has been set as follows:\n - settings =\n\n""" - ) - + settings_str, + Updating {namelist} + + The updated values are: + + {cfg_to_yaml_str(settings)} + + """ + ), verbose=VERBOSE, ) - # - # ----------------------------------------------------------------------- - # - # Call a python script to update the experiment's actual FV3 INPUT.NML - # file for restart. - # - # ----------------------------------------------------------------------- - # - fv3_input_nml_fp = os.path.join(run_dir, FV3_NML_FN) - - try: - set_namelist( - [ - "-q", - "-n", - fv3_input_nml_fp, - "-u", - settings_str, - "-o", - fv3_input_nml_fp, - ] - ) - except: - logging.exception( - dedent( - f""" - Call to python script set_namelist.py to generate an FV3 namelist file - failed. 
Parameters passed to this script are: - Full path to base namelist file: - fv3_input_nml_fp = '{fv3_input_nml_fp}' - Full path to output namelist file: - fv3_input_nml_fp = '{fv3_input_nml_fp}' - Namelist settings specified on command line:\n - settings =\n\n""" - ) - + settings_str - ) - return False - - return True + # Update the experiment's FV3 INPUT.NML file + realize( + input_config=namelist, + input_format="nml", + output_file=namelist, + output_format="nml", + update_config=get_nml_config(settings), + ) def parse_args(argv): """Parse command line arguments""" parser = argparse.ArgumentParser(description="Update FV3 input.nml file for restart.") parser.add_argument( - "-r", "--run_dir", - dest="run_dir", + "-n", "--namelist", + dest="namelist", required=True, - help="Run directory." - ) - - parser.add_argument( - "-p", "--path-to-defns", - dest="path_to_defns", - required=True, - help="Path to var_defns file.", + help="Path to namelist to update.", ) parser.add_argument( "--restart", action='store_true', - help='Update for restart') + help='Update for restart', + ) parser.add_argument( "--aqm_na_13km", action='store_true', - help='Update for AQM_NA_13km in air quality modeling') + help='Update for AQM_NA_13km in air quality modeling', + ) return parser.parse_args(argv) if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_shell_config(args.path_to_defns) - cfg = flatten_dict(cfg) - import_vars(dictionary=cfg) update_input_nml( - run_dir=args.run_dir, + namelist=args.namelist, + restart=args.restart, + aqm_na_13km=args.aqm_na_13km, ) diff --git a/ush/valid_param_vals.yaml b/ush/valid_param_vals.yaml index f432c0bd76..fd21b3e1cf 100644 --- a/ush/valid_param_vals.yaml +++ b/ush/valid_param_vals.yaml @@ -4,7 +4,7 @@ valid_vals_RUN_ENVIR: ["nco", "community"] valid_vals_VERBOSE: [True, False] valid_vals_DEBUG: [True, False] -valid_vals_MACHINE: ["HERA", "WCOSS2", "ORION", "HERCULES", "JET", "ODIN", "CHEYENNE", "DERECHO", "STAMPEDE", "LINUX", "MACOS", "NOAACLOUD", "SINGULARITY", "GAEA-C5"] +valid_vals_MACHINE: ["HERA", "WCOSS2", "ORION", "HERCULES", "JET", "ODIN", "CHEYENNE", "DERECHO", "STAMPEDE", "LINUX", "MACOS", "NOAACLOUD", "SINGULARITY", "GAEA"] valid_vals_SCHED: ["slurm", "pbspro", "lsf", "lsfcray", "none"] valid_vals_FCST_MODEL: ["ufs-weather-model"] valid_vals_WORKFLOW_MANAGER: ["rocoto", "ecflow", "none"] @@ -37,8 +37,8 @@ valid_vals_CCPP_PHYS_SUITE: [ "FV3_RAP" ] valid_vals_GFDLgrid_NUM_CELLS: [48, 96, 192, 384, 768, 1152, 3072] -valid_vals_EXTRN_MDL_NAME_ICS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "NAM"] -valid_vals_EXTRN_MDL_NAME_LBCS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "NAM"] +valid_vals_EXTRN_MDL_NAME_ICS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "RRFS", "NAM"] +valid_vals_EXTRN_MDL_NAME_LBCS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "RRFS", "NAM"] valid_vals_USE_USER_STAGED_EXTRN_FILES: [True, False] valid_vals_FV3GFS_FILE_FMT_ICS: ["nemsio", "grib2", "netcdf"] valid_vals_FV3GFS_FILE_FMT_LBCS: ["nemsio", "grib2", "netcdf"] diff --git a/ush/wrappers/job_cards/sbatch/get_ics.sbatch b/ush/wrappers/job_cards/sbatch/get_ics.sbatch index 5aca1c2e7f..17b6210eae 100644 --- a/ush/wrappers/job_cards/sbatch/get_ics.sbatch +++ b/ush/wrappers/job_cards/sbatch/get_ics.sbatch @@ -25,6 +25,6 @@ export ICS_OR_LBCS='ICS' $USHdir/load_modules_run_task.sh "get_extrn_ics" $JOBSdir/JREGIONAL_GET_EXTRN_MDL_FILES -# Gaea-c5 differences: +# Gaea differences: 
diff --git a/ush/valid_param_vals.yaml b/ush/valid_param_vals.yaml
index f432c0bd76..fd21b3e1cf 100644
--- a/ush/valid_param_vals.yaml
+++ b/ush/valid_param_vals.yaml
@@ -4,7 +4,7 @@
 valid_vals_RUN_ENVIR: ["nco", "community"]
 valid_vals_VERBOSE: [True, False]
 valid_vals_DEBUG: [True, False]
-valid_vals_MACHINE: ["HERA", "WCOSS2", "ORION", "HERCULES", "JET", "ODIN", "CHEYENNE", "DERECHO", "STAMPEDE", "LINUX", "MACOS", "NOAACLOUD", "SINGULARITY", "GAEA-C5"]
+valid_vals_MACHINE: ["HERA", "WCOSS2", "ORION", "HERCULES", "JET", "ODIN", "CHEYENNE", "DERECHO", "STAMPEDE", "LINUX", "MACOS", "NOAACLOUD", "SINGULARITY", "GAEA"]
 valid_vals_SCHED: ["slurm", "pbspro", "lsf", "lsfcray", "none"]
 valid_vals_FCST_MODEL: ["ufs-weather-model"]
 valid_vals_WORKFLOW_MANAGER: ["rocoto", "ecflow", "none"]
@@ -37,8 +37,8 @@ valid_vals_CCPP_PHYS_SUITE: [
   "FV3_RAP"
 ]
 valid_vals_GFDLgrid_NUM_CELLS: [48, 96, 192, 384, 768, 1152, 3072]
-valid_vals_EXTRN_MDL_NAME_ICS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "NAM"]
-valid_vals_EXTRN_MDL_NAME_LBCS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "NAM"]
+valid_vals_EXTRN_MDL_NAME_ICS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "RRFS", "NAM"]
+valid_vals_EXTRN_MDL_NAME_LBCS: ["GSMGFS", "FV3GFS", "UFS-CASE-STUDY", "GEFS", "GDAS", "RAP", "HRRR", "RRFS", "NAM"]
 valid_vals_USE_USER_STAGED_EXTRN_FILES: [True, False]
 valid_vals_FV3GFS_FILE_FMT_ICS: ["nemsio", "grib2", "netcdf"]
 valid_vals_FV3GFS_FILE_FMT_LBCS: ["nemsio", "grib2", "netcdf"]
diff --git a/ush/wrappers/job_cards/sbatch/get_ics.sbatch b/ush/wrappers/job_cards/sbatch/get_ics.sbatch
index 5aca1c2e7f..17b6210eae 100644
--- a/ush/wrappers/job_cards/sbatch/get_ics.sbatch
+++ b/ush/wrappers/job_cards/sbatch/get_ics.sbatch
@@ -25,6 +25,6 @@
 export ICS_OR_LBCS='ICS'
 $USHdir/load_modules_run_task.sh "get_extrn_ics" $JOBSdir/JREGIONAL_GET_EXTRN_MDL_FILES
 
-# Gaea-c5 differences:
+# Gaea differences:
 ##SBATCH --qos=normal
 ##SBATCH --clusters=es --partition=eslogin_c5 --export=NONE
diff --git a/ush/wrappers/job_cards/sbatch/get_lbcs.sbatch b/ush/wrappers/job_cards/sbatch/get_lbcs.sbatch
index fc747ece40..46a4aad45e 100644
--- a/ush/wrappers/job_cards/sbatch/get_lbcs.sbatch
+++ b/ush/wrappers/job_cards/sbatch/get_lbcs.sbatch
@@ -25,6 +25,6 @@
 export ICS_OR_LBCS='LBCS'
 $USHdir/load_modules_run_task.sh "get_extrn_lbcs" $JOBSdir/JREGIONAL_GET_EXTRN_MDL_FILES
 
-# Gaea-c5 differences:
+# Gaea differences:
 ##SBATCH --qos=normal
 ##SBATCH --clusters=es --partition=eslogin_c5 --export=NONE
diff --git a/ush/wrappers/job_cards/sbatch/make_grid.sbatch b/ush/wrappers/job_cards/sbatch/make_grid.sbatch
index b8866af36f..4b7dbd218c 100644
--- a/ush/wrappers/job_cards/sbatch/make_grid.sbatch
+++ b/ush/wrappers/job_cards/sbatch/make_grid.sbatch
@@ -17,7 +17,7 @@
 export JOBSdir=`grep JOBSdir $GLOBAL_VAR_DEFNS_FP | cut -d\' -f2`
 $USHdir/load_modules_run_task.sh "make_grid" $JOBSdir/JREGIONAL_MAKE_GRID
 
-# Gaea-c5 differences:
+# Gaea differences:
 ##SBATCH --qos=normal
 ##SBATCH --clusters=c5 --partition=batch --export=NONE
 #export nprocs='24'
diff --git a/ush/wrappers/job_cards/sbatch/make_ics.sbatch b/ush/wrappers/job_cards/sbatch/make_ics.sbatch
index 512eefeae5..729240bdbf 100644
--- a/ush/wrappers/job_cards/sbatch/make_ics.sbatch
+++ b/ush/wrappers/job_cards/sbatch/make_ics.sbatch
@@ -23,7 +23,7 @@
 export NWGES_DIR=$PWD'/../../../nco_dirs/nwges/20190615'
 $USHdir/load_modules_run_task.sh "make_ics" $JOBSdir/JREGIONAL_MAKE_ICS
 
-# Gaea-c5 differences:
+# Gaea differences:
 ##SBATCH --qos=normal
 ##SBATCH --clusters=c5 --partition=batch --export=NONE
 #export nprocs='48'
diff --git a/ush/wrappers/job_cards/sbatch/make_lbcs.sbatch b/ush/wrappers/job_cards/sbatch/make_lbcs.sbatch
index ab1d1312c8..d4db098b28 100644
--- a/ush/wrappers/job_cards/sbatch/make_lbcs.sbatch
+++ b/ush/wrappers/job_cards/sbatch/make_lbcs.sbatch
@@ -25,7 +25,7 @@
 export bcgrpnum='1'
 $USHdir/load_modules_run_task.sh "make_lbcs" $JOBSdir/JREGIONAL_MAKE_LBCS
 
-# Gaea-c5 differences:
+# Gaea differences:
 ##SBATCH --qos=normal
 ##SBATCH --clusters=c5 --partition=batch --export=NONE
 #export nprocs='48'
diff --git a/ush/wrappers/job_cards/sbatch/make_orog.sbatch b/ush/wrappers/job_cards/sbatch/make_orog.sbatch
index 3b440cbd42..b0c8d21e54 100644
--- a/ush/wrappers/job_cards/sbatch/make_orog.sbatch
+++ b/ush/wrappers/job_cards/sbatch/make_orog.sbatch
@@ -17,7 +17,7 @@
 export JOBSdir=`grep JOBSdir $GLOBAL_VAR_DEFNS_FP | cut -d\' -f2`
 $USHdir/load_modules_run_task.sh "make_orog" $JOBSdir/JREGIONAL_MAKE_OROG
 
-# Gaea-c5 differences:
+# Gaea differences:
 ##SBATCH --qos=normal
 ##SBATCH --clusters=c5 --partition=batch --export=NONE
 #export nprocs='24'
diff --git a/ush/wrappers/job_cards/sbatch/make_sfc_climo.sbatch b/ush/wrappers/job_cards/sbatch/make_sfc_climo.sbatch
index b791288922..52769cb033 100644
--- a/ush/wrappers/job_cards/sbatch/make_sfc_climo.sbatch
+++ b/ush/wrappers/job_cards/sbatch/make_sfc_climo.sbatch
@@ -17,7 +17,7 @@
 export JOBSdir=`grep JOBSdir $GLOBAL_VAR_DEFNS_FP | cut -d\' -f2`
 $USHdir/load_modules_run_task.sh "make_sfc_climo" $JOBSdir/JREGIONAL_MAKE_SFC_CLIMO
 
-# Gaea-c5 differences:
+# Gaea differences:
 ##SBATCH --qos=normal
 ##SBATCH --clusters=c5 --partition=batch --export=NONE
 #export nprocs='48'
diff --git a/ush/wrappers/job_cards/sbatch/run_fcst.sbatch b/ush/wrappers/job_cards/sbatch/run_fcst.sbatch
index 75abd6fc03..056fd70a28 100644
--- a/ush/wrappers/job_cards/sbatch/run_fcst.sbatch
+++ b/ush/wrappers/job_cards/sbatch/run_fcst.sbatch
@@ -21,7 +21,7 @@
 export SLASH_ENSMEM_SUBDIR='/'
 $USHdir/load_modules_run_task.sh "run_fcst" $JOBSdir/JREGIONAL_RUN_FCST
 
-# Gaea-c5 differences:
+# Gaea differences:
 ##SBATCH --qos=normal
 ##SBATCH --clusters=c5 --partition=batch --export=NONE
 #export nprocs='48'
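The commented "Gaea differences" trailers in these job cards collect the header changes needed to submit on Gaea. A minimal sketch of a compute-job header with them applied, assembled only from the commented lines above (job name, account, and walltime directives from each card's existing header are omitted here):

    #!/usr/bin/env bash
    #SBATCH --qos=normal
    #SBATCH --clusters=c5 --partition=batch --export=NONE
    export nprocs='48'    # per-card value; the grid/orog cards use '24'

The get_ics/get_lbcs cards differ again, targeting --clusters=es --partition=eslogin_c5 rather than the c5 batch partition.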
diff --git a/ush/wrappers/job_cards/sbatch/run_post.sbatch b/ush/wrappers/job_cards/sbatch/run_post.sbatch
index c2a24a7f5e..6af04693f3 100644
--- a/ush/wrappers/job_cards/sbatch/run_post.sbatch
+++ b/ush/wrappers/job_cards/sbatch/run_post.sbatch
@@ -26,7 +26,7 @@
 for (( i=0; i<=$((num_fcst_hrs)); i++ )); do
   $USHdir/load_modules_run_task.sh "run_post" $JOBSdir/JREGIONAL_RUN_POST
 done
 
-# Gaea-c5 differences:
+# Gaea differences:
 ##SBATCH --qos=normal
 ##SBATCH --clusters=c5 --partition=batch --export=NONE
diff --git a/ush/wrappers/run_fcst.sh b/ush/wrappers/run_fcst.sh
index 7450de7cc5..c875cb16c0 100755
--- a/ush/wrappers/run_fcst.sh
+++ b/ush/wrappers/run_fcst.sh
@@ -1,7 +1,10 @@
 #!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
 export CDATE=${DATE_FIRST_CYCL}
 export CYCLE_DIR=${EXPTDIR}/${CDATE}
 export cyc=${DATE_FIRST_CYCL:8:2}
diff --git a/ush/wrappers/run_get_ics.sh b/ush/wrappers/run_get_ics.sh
index 0ee521a67d..494eab6850 100755
--- a/ush/wrappers/run_get_ics.sh
+++ b/ush/wrappers/run_get_ics.sh
@@ -1,7 +1,10 @@
 #!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow task_get_extrn_ics ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
 export CDATE=${DATE_FIRST_CYCL}
 export CYCLE_DIR=${EXPTDIR}/${CDATE}
 export cyc=${DATE_FIRST_CYCL:8:2}
diff --git a/ush/wrappers/run_get_lbcs.sh b/ush/wrappers/run_get_lbcs.sh
index 543ab6e47d..ec6fa23892 100755
--- a/ush/wrappers/run_get_lbcs.sh
+++ b/ush/wrappers/run_get_lbcs.sh
@@ -1,7 +1,10 @@
 #!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow task_get_extrn_lbcs ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
 export CDATE=${DATE_FIRST_CYCL}
 export CYCLE_DIR=${EXPTDIR}/${CDATE}
 export cyc=${DATE_FIRST_CYCL:8:2}
diff --git a/ush/wrappers/run_integration_test.sh b/ush/wrappers/run_integration_test.sh
new file mode 100755
index 0000000000..6ce6afb8ed
--- /dev/null
+++ b/ush/wrappers/run_integration_test.sh
@@ -0,0 +1,14 @@
+#!/bin/sh
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+set -xa
+source ${GLOBAL_VAR_DEFNS_FP}
+export CDATE=${DATE_FIRST_CYCL}
+export CYCLE_DIR=${EXPTDIR}/${CDATE}
+export cyc=${DATE_FIRST_CYCL:8:2}
+export PDY=${DATE_FIRST_CYCL:0:8}
+export SLASH_ENSMEM_SUBDIR=""
+export ENSMEM_INDX=""
+export FCST_DIR=${EXPTDIR}/$PDY$cyc
+
+${JOBSdir}/JREGIONAL_INTEGRATION_TEST
+
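The wrapper edits above and below all follow one pattern: GLOBAL_VAR_DEFNS_FP now points at var_defns.yaml, and each script pulls in only the YAML sections it needs via source_yaml from source_util_funcs.sh. A generic sketch of the pattern, where task_foo stands in for a task-specific section name (illustrative only):

    #!/usr/bin/env bash
    export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
    . $USHdir/source_util_funcs.sh
    # Load the shared workflow section plus this task's own section.
    for sect in workflow task_foo ; do
      source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
    done
    set -xa

The new run_integration_test.sh above is the one exception, still sourcing the shell-syntax var_defns.sh.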
diff --git a/ush/wrappers/run_make_grid.sh b/ush/wrappers/run_make_grid.sh
index 2d55beaf94..f7a6f8aeed 100755
--- a/ush/wrappers/run_make_grid.sh
+++ b/ush/wrappers/run_make_grid.sh
@@ -1,7 +1,10 @@
 #!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
 export CDATE=${DATE_FIRST_CYCL}
 export CYCLE_DIR=${EXPTDIR}/${CDATE}
diff --git a/ush/wrappers/run_make_ics.sh b/ush/wrappers/run_make_ics.sh
index 5c629722fc..adcdc16180 100755
--- a/ush/wrappers/run_make_ics.sh
+++ b/ush/wrappers/run_make_ics.sh
@@ -1,7 +1,10 @@
 #!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
 export CDATE=${DATE_FIRST_CYCL}
 export CYCLE_DIR=${EXPTDIR}/${CDATE}
 export cyc=${DATE_FIRST_CYCL:8:2}
diff --git a/ush/wrappers/run_make_lbcs.sh b/ush/wrappers/run_make_lbcs.sh
index 27c94c127f..f9fe35d9da 100755
--- a/ush/wrappers/run_make_lbcs.sh
+++ b/ush/wrappers/run_make_lbcs.sh
@@ -1,7 +1,10 @@
 #!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
 export CDATE=${DATE_FIRST_CYCL}
 export CYCLE_DIR=${EXPTDIR}/${CDATE}
 export cyc=${DATE_FIRST_CYCL:8:2}
diff --git a/ush/wrappers/run_make_orog.sh b/ush/wrappers/run_make_orog.sh
index 5f02ff9599..ebc5259ec1 100755
--- a/ush/wrappers/run_make_orog.sh
+++ b/ush/wrappers/run_make_orog.sh
@@ -1,7 +1,10 @@
 #!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
 export CDATE=${DATE_FIRST_CYCL}
 export CYCLE_DIR=${EXPTDIR}/${CDATE}
diff --git a/ush/wrappers/run_make_sfc_climo.sh b/ush/wrappers/run_make_sfc_climo.sh
index fab33f75d6..8024f529fc 100755
--- a/ush/wrappers/run_make_sfc_climo.sh
+++ b/ush/wrappers/run_make_sfc_climo.sh
@@ -1,7 +1,10 @@
 #!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
 export CDATE=${DATE_FIRST_CYCL}
 export CYCLE_DIR=${EXPTDIR}/${CDATE}
diff --git a/ush/wrappers/run_post.sh b/ush/wrappers/run_post.sh
index 46ef104365..ca060acb1f 100755
--- a/ush/wrappers/run_post.sh
+++ b/ush/wrappers/run_post.sh
@@ -1,7 +1,10 @@
 #!/usr/bin/env bash
-export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.sh"
+export GLOBAL_VAR_DEFNS_FP="${EXPTDIR}/var_defns.yaml"
+. $USHdir/source_util_funcs.sh
+for sect in workflow ; do
+  source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect}
+done
 set -xa
-source ${GLOBAL_VAR_DEFNS_FP}
 export CDATE=${DATE_FIRST_CYCL}
 export CYCLE_DIR=${EXPTDIR}/${CDATE}
 export cyc=${DATE_FIRST_CYCL:8:2}
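For reference, these wrappers are intended to be run one at a time in dependency order. A minimal sketch of a stand-alone sequence, assuming EXPTDIR and USHdir are exported as the wrappers require:

    # Hypothetical stand-alone run; each step must finish before the next starts.
    cd ush/wrappers
    ./run_make_grid.sh && ./run_make_orog.sh && ./run_make_sfc_climo.sh
    ./run_get_ics.sh && ./run_get_lbcs.sh
    ./run_make_ics.sh && ./run_make_lbcs.sh
    ./run_fcst.sh && ./run_post.sh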