diff --git a/build.sh b/build.sh index 2922d45e5..502cf7f88 100755 --- a/build.sh +++ b/build.sh @@ -71,7 +71,7 @@ while getopts "p:t:c:hvdfa" opt; do done case ${BUILD_TARGET} in - hera | orion | hercules | wcoss2 | noaacloud) + hera | orion | hercules | wcoss2 | noaacloud | gaea) echo "Building GDASApp on $BUILD_TARGET" source $dir_root/ush/module-setup.sh module use $dir_root/modulefiles diff --git a/ci/run_ci.sh b/ci/run_ci.sh index 200cfd61b..b62f78a88 100755 --- a/ci/run_ci.sh +++ b/ci/run_ci.sh @@ -61,7 +61,7 @@ module use $GDAS_MODULE_USE module load GDAS/$TARGET echo "---------------------------------------------------" >> $outfile rm -rf log.ctest -ctest -R gdasapp --output-on-failure &>> log.ctest +ctest -E "manual" -R gdasapp --output-on-failure &>> log.ctest ctest_status=$? npassed=$(cat log.ctest | grep "tests passed") if [ $ctest_status -eq 0 ]; then diff --git a/modulefiles/EVA/wcoss2.lua b/modulefiles/EVA/wcoss2.lua new file mode 100644 index 000000000..b604dcdbf --- /dev/null +++ b/modulefiles/EVA/wcoss2.lua @@ -0,0 +1,25 @@ +help([[ +Load environment for running EVA. +]]) + +local pkgName = myModuleName() +local pkgVersion = myModuleVersion() +local pkgNameVer = myModuleFullName() + +conflict(pkgName) + +load("PrgEnv-intel/8.2.0") +load("craype") +load("cray-pals") +load("git/2.29.0") +load("intel/19.1.3.304") +load("python/3.10.4") +load("ve/evs/1.0") + +append_path("PATH", "/lfs/h2/emc/da/noscrub/emc.da/eva/opt/bin") +append_path("PYTHONPATH", "/lfs/h2/emc/da/noscrub/emc.da/eva/opt/") + +whatis("Name: ".. pkgName) +whatis("Version: ".. pkgVersion) +whatis("Category: EVA") +whatis("Description: Load all libraries needed for EVA") diff --git a/modulefiles/GDAS/gaea.intel.lua b/modulefiles/GDAS/gaea.intel.lua new file mode 100644 index 000000000..744995ca7 --- /dev/null +++ b/modulefiles/GDAS/gaea.intel.lua @@ -0,0 +1,96 @@ +help([[ +Load environment for running the GDAS application with Intel compilers and MPI. 
+]]) + +local pkgName = myModuleName() +local pkgVersion = myModuleVersion() +local pkgNameVer = myModuleFullName() + +prepend_path("MODULEPATH", '/ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core') +prepend_path("MODULEPATH", '/ncrc/proj/epic/rocoto/modulefiles') + +-- below two lines get us access to the spack-stack modules +load("stack-intel/2023.1.0") +load("stack-cray-mpich/8.1.25") +-- JCSDA has 'jedi-fv3-env/unified-dev', but we should load these manually as needed +load("cmake/3.23.1") +load("gettext/0.20.2") +--load("libunistring/1.1") +--load("libidn2/2.3.4") +load("pcre2/10.42") +load("curl/8.4.0") +load("zlib/1.2.13") +load("git/2.35.2") +load("pkg-config/0.29.2") +load("hdf5/1.14.0") +load("parallel-netcdf/1.12.2") +load("netcdf-c/4.9.2") +load("nccmp/1.9.0.1") +load("netcdf-fortran/4.6.1") +load("nco/5.0.6") +load("parallelio/2.5.10") +load("wget/1.21.3") +load("boost/1.83.0") +load("bufr/12.0.1") +load("git-lfs/2.11.0") +load("ecbuild/3.7.2") +load("openjpeg/2.3.1") +load("eccodes/2.32.0") +load("eigen/3.4.0") +load("openblas/0.3.24") +load("eckit/1.24.5") +load("fftw/3.3.10") +load("fckit/0.11.0") +load("fiat/1.2.0") +load("ectrans/1.2.0") +load("atlas/0.35.1") +load("sp/2.5.0") +load("gsl-lite/0.37.0") +load("libjpeg/2.1.0") +load("krb5/1.16.3") +load("libtirpc/1.3.3") +load("hdf/4.2.15") +load("jedi-cmake/1.4.0") +load("libpng/1.6.37") +--load("libxt/1.1.5") +--load("libxmu/1.1.4") +--load("libxpm/4.11.0") +load("libxaw/1.10.13") +load("udunits/2.2.28") +load("ncview/2.1.9") +load("netcdf-cxx4/4.3.1") +load("json/3.10.5") +load("crtm/2.4.0.1") +load("rocoto/1.3.6") +load("prod_util/2.1.1") + +load("py-jinja2/3.0.3") +load("py-netcdf4/1.5.8") +load("py-pybind11/2.11.0") +load("py-pycodestyle/2.11.0") +load("py-pyyaml/6.0") +load("py-scipy/1.11.3") +load("py-xarray/2023.7.0") +load("py-f90nml/1.4.3") +load("py-pip/23.1.2") + +-- hack for wxflow +--prepend_path("PYTHONPATH", "/scratch1/NCEPDEV/da/python/gdasapp/wxflow/20240307/src") + +setenv("CC","cc") +setenv("CXX","CC") +setenv("FC","ftn") + +local mpiexec = '/usr/bin/srun' +local mpinproc = '-n' +setenv('MPIEXEC_EXEC', mpiexec) +setenv('MPIEXEC_NPROC', mpinproc) + +setenv("CRTM_FIX","/gpfs/f5/ufs-ard/world-shared/GDASApp/fix/crtm/2.4.0") +setenv("GDASAPP_TESTDATA","/gpfs/f5/ufs-ard/world-shared/GDASApp/CI/data") +setenv("GDASAPP_UNIT_TEST_DATA_PATH", "/gpfs/f5/ufs-ard/world-shared/GDASApp/CI/data/test") + +whatis("Name: ".. "pkgName") +whatis("Version: ".. 
"pkgVersion") +whatis("Category: GDASApp") +whatis("Description: Load all libraries needed for GDASApp") diff --git a/modulefiles/GDAS/hera.intel.lua b/modulefiles/GDAS/hera.intel.lua index d8e389058..109b1e938 100644 --- a/modulefiles/GDAS/hera.intel.lua +++ b/modulefiles/GDAS/hera.intel.lua @@ -47,7 +47,7 @@ load("atlas/0.35.1") load("sp/2.5.0") load("gsl-lite/0.37.0") load("libjpeg/2.1.0") -load("krb5/1.15.1") +load("krb5/1.20.1") load("libtirpc/1.3.3") load("hdf/4.2.15") load("jedi-cmake/1.4.0") diff --git a/modulefiles/GDAS/wcoss2.intel.lua b/modulefiles/GDAS/wcoss2.intel.lua index d21099b2e..4607f0a09 100644 --- a/modulefiles/GDAS/wcoss2.intel.lua +++ b/modulefiles/GDAS/wcoss2.intel.lua @@ -29,12 +29,16 @@ load("eckit/1.24.4") load("fckit/0.11.0") load("atlas/0.35.0") load("nccmp") +load("nco/5.0.6") +load("gsl/2.7") +load("prod_util/2.0.14") +load("bufr/12.0.1") -- hack for pybind11 setenv("pybind11_ROOT", "/apps/spack/python/3.8.6/intel/19.1.3.304/pjn2nzkjvqgmjw4hmyz43v5x4jbxjzpk/lib/python3.8/site-packages/pybind11/share/cmake/pybind11") --- hack for wxflow ---prepend_path("PYTHONPATH", "/scratch1/NCEPDEV/da/python/gdasapp/wxflow/20240307/src") +-- hack for git-lfs +prepend_path("PATH", "/apps/spack/git-lfs/2.11.0/gcc/11.2.0/m6b6nl5kfqngfteqbggydc7kflxere3s/bin") local mpiexec = '/pe/intel/compilers_and_libraries_2020.4.304/linux/mpi/intel64/bin/mpirun' local mpinproc = '-n' diff --git a/parm/aero/berror/aero_diffparm.yaml.j2 b/parm/aero/berror/aero_diffparm.yaml.j2 new file mode 100644 index 000000000..d0b982a6e --- /dev/null +++ b/parm/aero/berror/aero_diffparm.yaml.j2 @@ -0,0 +1,47 @@ +geometry: + fms initialization: + namelist filename: ./fv3jedi/fmsmpp.nml + field table filename: ./fv3jedi/field_table + akbk: ./fv3jedi/akbk.nc4 + npx: {{ npx_ges }} + npy: {{ npy_ges }} + npz: {{ npz_ges }} + field metadata override: ./fv3jedi/fv3jedi_fieldmetadata_restart.yaml +date: '{{ current_cycle | to_isotime }}' +background: + datetime: '{{ current_cycle | to_isotime }}' + filetype: fms restart + datapath: ./bkg + filename_core: '{{ current_cycle | to_fv3time }}.fv_core.res.nc' + filename_trcr: '{{ current_cycle | to_fv3time }}.fv_tracer.res.nc' + filename_cplr: '{{ current_cycle | to_fv3time }}.coupler.res' + state variables: [mass_fraction_of_sulfate_in_air, + mass_fraction_of_hydrophobic_black_carbon_in_air, + mass_fraction_of_hydrophilic_black_carbon_in_air, + mass_fraction_of_hydrophobic_organic_carbon_in_air, + mass_fraction_of_hydrophilic_organic_carbon_in_air, + mass_fraction_of_dust001_in_air, mass_fraction_of_dust002_in_air, + mass_fraction_of_dust003_in_air, mass_fraction_of_dust004_in_air, + mass_fraction_of_dust005_in_air, mass_fraction_of_sea_salt001_in_air, + mass_fraction_of_sea_salt002_in_air, mass_fraction_of_sea_salt003_in_air, + mass_fraction_of_sea_salt004_in_air] + +background error: + covariance model: SABER + saber central block: + saber block name: diffusion + calibration: + normalization: + iterations: {{ diff_iter }} + groups: + - horizontal: + fixed value: {{ horiz_len }} + write: + filepath: ./diffusion_hz + - vertical: + levels: {{ npz_ges }} + fixed value: {{ fixed_val }} + as gaussian: true + write: + filepath: ./diffusion_vt + diff --git a/parm/aero/prep/prep_viirs.yaml.j2 b/parm/aero/prep/prep_viirs.yaml.j2 new file mode 100644 index 000000000..a4cff793b --- /dev/null +++ b/parm/aero/prep/prep_viirs.yaml.j2 @@ -0,0 +1,14 @@ +provider: VIIRSAOD +window begin: {{ window_begin | to_isotime }} +window end: {{ window_end | to_isotime }} +binning: + cressman 
radius: 100 + stride: 132 + min number of obs: 500 +variable: aerosolOpticalDepth +output file: {{ DATA }}/{{ OPREFIX }}viirs_{{sensor}}.{{ current_cycle | to_YMDH }}.nc4 +input files: {{ input_files }} +thinning: + threshold: 0 +channel: 4 +preqc: 0 diff --git a/parm/ioda/bufr2ioda/bufr2ioda_adpupa.json b/parm/ioda/bufr2ioda/bufr2ioda_adpupa.json new file mode 100644 index 000000000..bc75577f5 --- /dev/null +++ b/parm/ioda/bufr2ioda/bufr2ioda_adpupa.json @@ -0,0 +1,12 @@ +{ + "data_format": "bufr_d", + "data_type": "adpupa", + "cycle_type": "{{ RUN }}", + "cycle_datetime" : "{{ current_cycle | to_YMDH }}", + "dump_directory": "{{ DMPDIR }}", + "ioda_directory": "{{ COM_OBS }}", + "subsets": [ "NC002001, NC002002, NC002003, NC002004, NC002005" ], + "source": "NCEP data tank", + "data_provider": "U.S. NOAA", + "data_description": "UPPER-AIR (RAOB, PIBAL, RECCO, DROPS) REPORTS", +} diff --git a/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_marinemammals.json b/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_marinemammals.json index 8688ae9d7..f42bd0d93 100644 --- a/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_marinemammals.json +++ b/parm/ioda/bufr2ioda/bufr2ioda_insitu_profile_marinemammals.json @@ -2,7 +2,7 @@ "data_format" : "tesac", "subsets" : "TESAC", "source" : "NCEP data tank", - "data_type" : "marinemammals", + "data_type" : "marinemammal", "cycle_type" : "{{ RUN }}", "cycle_datetime" : "{{ current_cycle | to_YMDH }}", "dump_directory" : "{{ DMPDIR }}", diff --git a/parm/ioda/bufr2ioda/bufr2ioda_satwind_amv_ahi.json b/parm/ioda/bufr2ioda/bufr2ioda_satwnd_amv_ahi.json similarity index 100% rename from parm/ioda/bufr2ioda/bufr2ioda_satwind_amv_ahi.json rename to parm/ioda/bufr2ioda/bufr2ioda_satwnd_amv_ahi.json diff --git a/parm/ioda/bufr2ioda/bufr2ioda_satwind_amv_avhrr.json b/parm/ioda/bufr2ioda/bufr2ioda_satwnd_amv_avhrr.json similarity index 100% rename from parm/ioda/bufr2ioda/bufr2ioda_satwind_amv_avhrr.json rename to parm/ioda/bufr2ioda/bufr2ioda_satwnd_amv_avhrr.json diff --git a/parm/ioda/bufr2ioda/bufr2ioda_satwind_amv_goes.json b/parm/ioda/bufr2ioda/bufr2ioda_satwnd_amv_goes.json similarity index 100% rename from parm/ioda/bufr2ioda/bufr2ioda_satwind_amv_goes.json rename to parm/ioda/bufr2ioda/bufr2ioda_satwnd_amv_goes.json diff --git a/parm/ioda/bufr2ioda/bufr2ioda_satwnd_amv_leogeo.json b/parm/ioda/bufr2ioda/bufr2ioda_satwnd_amv_leogeo.json new file mode 100644 index 000000000..074240abc --- /dev/null +++ b/parm/ioda/bufr2ioda/bufr2ioda_satwnd_amv_leogeo.json @@ -0,0 +1,15 @@ +{ + "data_format" : "bufr_d", + "data_type" : "satwnd", + "cycle_type" : "{{ RUN }}", + "cycle_datetime" : "{{ current_cycle | to_YMDH }}", + "dump_directory" : "{{ DMPDIR }}", + "ioda_directory" : "{{ COM_OBS }}", + "subsets" : [ "NC005072" ], + "data_description" : "NC005072 LEOGEO SATWIND IR(LW)", + "data_provider" : "U.S. 
NOAA/NESDIS", + "sensor_info" : { "sensor_name": "LEOGEO", "sensor_full_name": "Combined Platform LEO/GEO", "sensor_id": 999 }, + "satellite_info" : [ + { "satellite_name": "multi", "satellite_full_name": "MULTIPLE-SATS", "satellite_id": 854, "launch time": "YYYYMMDD" }, + ] +} diff --git a/parm/ioda/bufr2ioda/bufr2ioda_satwind_amv_modis.json b/parm/ioda/bufr2ioda/bufr2ioda_satwnd_amv_modis.json similarity index 100% rename from parm/ioda/bufr2ioda/bufr2ioda_satwind_amv_modis.json rename to parm/ioda/bufr2ioda/bufr2ioda_satwnd_amv_modis.json diff --git a/parm/ioda/bufr2ioda/bufr2ioda_satwind_amv_seviri.json b/parm/ioda/bufr2ioda/bufr2ioda_satwnd_amv_seviri.json similarity index 100% rename from parm/ioda/bufr2ioda/bufr2ioda_satwind_amv_seviri.json rename to parm/ioda/bufr2ioda/bufr2ioda_satwnd_amv_seviri.json diff --git a/parm/ioda/bufr2ioda/bufr2ioda_satwind_amv_viirs.json b/parm/ioda/bufr2ioda/bufr2ioda_satwnd_amv_viirs.json similarity index 100% rename from parm/ioda/bufr2ioda/bufr2ioda_satwind_amv_viirs.json rename to parm/ioda/bufr2ioda/bufr2ioda_satwnd_amv_viirs.json diff --git a/parm/soca/letkf/letkf.yaml.j2 b/parm/soca/letkf/letkf.yaml.j2 new file mode 100644 index 000000000..3cf0284df --- /dev/null +++ b/parm/soca/letkf/letkf.yaml.j2 @@ -0,0 +1,69 @@ +geometry: + geom_grid_file: soca_gridspec.nc + mom6_input_nml: mom_input.nml + fields metadata: fields_metadata.yaml + +time window: + begin: &date '{{ ATM_WINDOW_BEGIN }}' + length: PT6H + +background: + members from template: + template: + date: '{{ ATM_WINDOW_MIDDLE }}' + ocn_filename: enkfgdas.ocean.t06z.inst.f009.nc + ice_filename: enkfgdas.ice.t06z.inst.f009.nc + read_from_file: 1 + basename: ./ens/mem%mem% + state variables: [socn, tocn, ssh, uocn, vocn, cicen] + pattern: '%mem%' + nmembers: {{ NMEM_ENS }} + +observations: + observers: + +driver: + do posterior observer: true + save posterior mean increment: true + save posterior mean: true + save posterior variance: true + save prior mean: true + save prior variance: true + +local ensemble DA: + solver: LETKF + inflation: + rtps: 0.5 + rtpp: 0.6 + mult: 1.1 + +output: + datadir: data_output/ + date: *date + exp: letkf + type: ens + +output mean prior: + datadir: data_output/ + date: *date + exp: letkf + type: fc + +output variance prior: + datadir: data_output/ + date: *date + exp: letkf + type: fc + +output variance posterior: + datadir: data_output/ + date: *date + exp: letkf + type: an + +output increment: + datadir: data_output/ + date: *date + exp: letkf.inc + type: an + diff --git a/parm/soca/obs/config/dev/insitu_profile_argo.yaml b/parm/soca/obs/config/dev/insitu_profile_argo.yaml deleted file mode 100644 index cda8dc6a6..000000000 --- a/parm/soca/obs/config/dev/insitu_profile_argo.yaml +++ /dev/null @@ -1,22 +0,0 @@ -obs space: - name: insitu_profile_argo - obsdatain: - engine: - type: H5File - obsfile: !ENV ${DATA}/obs/${OPREFIX}insitu_profile_argo.${PDY}${cyc}.nc4 - obsdataout: - engine: - type: H5File - obsfile: !ENV ${DATA}/diags/insitu_profile_argo.${PDY}${cyc}.nc4 - simulated variables: [waterTemperature, salinity] - observed variables: [waterTemperature, salinity] - io pool: - max pool size: 1 -obs operator: - name: Composite - components: - - name: InsituTemperature - variables: - - name: waterTemperature -obs error: - covariance model: diagonal diff --git a/parm/soca/obs/config/dev/insitu_profile_bathy.yaml b/parm/soca/obs/config/dev/insitu_profile_bathy.yaml deleted file mode 100644 index 5d8c45f19..000000000 --- 
a/parm/soca/obs/config/dev/insitu_profile_bathy.yaml +++ /dev/null @@ -1,158 +0,0 @@ -obs space: - name: insitu_profile_bathy - obsdatain: - engine: - type: H5File - obsfile: !ENV ${DATA}/obs/${OPREFIX}insitu_profile_bathy.${PDY}${cyc}.nc4 - obsdataout: - engine: - type: H5File - obsfile: !ENV ${DATA}/diags/insitu_profile_bathy.${PDY}${cyc}.nc4 - simulated variables: [waterTemperature] - io pool: - max pool size: 1 -obs operator: - name: InsituTemperature -obs error: - covariance model: diagonal - -#-------------------------------------------------------------------------------------- -# START OF OBS FILTERS -# The QC filters used here are based on the document by IODE that can be found at -# https://cdn.ioos.noaa.gov/media/2017/12/recommendations_in_situ_data_real_time_qc.pdf -#-------------------------------------------------------------------------------------- -obs filters: - - # Land Check - - filter: Domain Check - where: - - variable: {name: GeoVaLs/sea_area_fraction} - minvalue: 0.5 -#-------------------------------------------------------------------------------------- -## Filters for T: -#-------------------------------------------------------------------------------------- - #------------------------------------------------------------------------------------ - ### Global range test - #------------------------------------------------------------------------------------ - - filter: Bounds Check - filter variables: [{name: waterTemperature}] - minvalue: -2.5 - maxvalue: 40.0 - - #------------------------------------------------------------------------------------ - ### Regional range tests - #------------------------------------------------------------------------------------ - - #### Red Sea - #------------------------------------------------------------------------------------ - #### - #### the document linked here describes Red sea as the are between 10N, 40E; - #### 20N, 50E; 30N, 30E; 10N, 40E. But that would also include Gulf of Aden. - #### A more reasonable choice seemed to be a box that extends from 10 N to - #### 30 N and 30 E to 45 East . 
- #------------------------------------------------------------------------------------ - - filter: Bounds Check - filter variables: [{name: waterTemperature}] - minvalue: 21.7 - maxvalue: 40.0 - where: - - variable: - name: MetaData/latitude - minvalue: 10 - maxvalue: 30 - - variable: - name: MetaData/longitude - minvalue: 30 - - #### Mediterranean Sea - #----------------------------------------------------------------------------- - ##### Area 1/3 for Mediterranean Sea - - filter: Bounds Check - filter variables: [{name: waterTemperature}] - minvalue: 10.0 - maxvalue: 40.0 - where: - - variable: - name: MetaData/latitude - minvalue: 30 - maxvalue: 40 - - variable: - name: MetaData/longitude - minvalue: -6 - maxvalue: 40 - ##### Area 2/3 for Mediterranean Sea - - filter: Bounds Check - filter variables: [{name: waterTemperature}] - minvalue: 10.0 - maxvalue: 40.0 - where: - - variable: - name: MetaData/latitude - minvalue: 40 - maxvalue: 41.5 - - variable: - name: MetaData/longitude - minvalue: 20 - maxvalue: 30 - ##### Area 3/3 for Mediterranean Sea - - filter: Bounds Check - filter variables: [{name: waterTemperature}] - minvalue: 10.0 - maxvalue: 40.0 - where: - - variable: - name: MetaData/latitude - minvalue: 40 - maxvalue: 46 - - variable: - name: MetaData/longitude - minvalue: 0 - maxvalue: 20 - - #### Northwestern shelves - #----------------------------------------------------------------------------- - - filter: Bounds Check - filter variables: [{name: waterTemperature}] - minvalue: -2.0 - maxvalue: 24.0 - where: - - variable: - name: MetaData/latitude - minvalue: 50 - maxvalue: 60 - - variable: - name: MetaData/longitude - minvalue: -20 - maxvalue: 10 - #### Southwestern shelves - #----------------------------------------------------------------------------- - - filter: Bounds Check - filter variables: [{name: waterTemperature}] - minvalue: -2.0 - maxvalue: 30 - where: - - variable: - name: MetaData/latitude - minvalue: 25 - maxvalue: 50 - - variable: - name: MetaData/longitude - minvalue: -30 - maxvalue: 0 - - #### Arctic Sea - #----------------------------------------------------------------------------- - - filter: Bounds Check - filter variables: [{name: waterTemperature}] - minvalue: -1.92 - maxvalue: 25.0 - where: - - variable: - name: MetaData/latitude - minvalue: 60 - - - filter: Background Check - filter variables: [{name: waterTemperature}] - threshold: 5.0 - absolute threshold: 5.0 - diff --git a/parm/soca/obs/config/dev/insitu_profile_tesac.yaml b/parm/soca/obs/config/dev/insitu_profile_tesac.yaml deleted file mode 100644 index 8012d1b7e..000000000 --- a/parm/soca/obs/config/dev/insitu_profile_tesac.yaml +++ /dev/null @@ -1,256 +0,0 @@ -obs space: - name: insitu_profile_tesac - obsdatain: - engine: - type: H5File - obsfile: !ENV ${DATA}/obs/${OPREFIX}insitu_profile_tesac.${PDY}${cyc}.nc4 - obsdataout: - engine: - type: H5File - obsfile: !ENV ${DATA}/diags/insitu_profile_tesac.${PDY}${cyc}.nc4 - simulated variables: [waterTemperature] #, salinity] - observed variables: [waterTemperature] #, salinity] - io pool: - max pool size: 1 -obs operator: - name: Composite - components: - - name: InsituTemperature - variables: - - name: waterTemperature -#-------------------------------------------------------------------------------------- -# The lines below are commented out due to the composite obs operator failing looking -# for MetaData: air_pressure. 
-# A separate yaml file: insitu_profile_salinity.yaml is used for sailinity profiles -# -# - name: VertInterp -# observation alias file: ./obsop_name_map.yaml -# variables: -# - name: salinity -# vertical coornidate: sea_water_depth -# observation vertical coordinate: depth -# interpolation method: linear -#-------------------------------------------------------------------------------------- -#-------------------------------------------------------------------------------------- -# START OF OBS FILTERS -# The QC filters used here are based on the document by IODE that can be found at -# https://cdn.ioos.noaa.gov/media/2017/12/recommendations_in_situ_data_real_time_qc.pdf -#-------------------------------------------------------------------------------------- -obs filters: - - # Land Check - - filter: Domain Check - where: - - variable: {name: GeoVaLs/sea_area_fraction} - minvalue: 0.5 -#-------------------------------------------------------------------------------------- -## Filters for T: -#-------------------------------------------------------------------------------------- - #------------------------------------------------------------------------------------ - ### Global range test - #------------------------------------------------------------------------------------ - - filter: Bounds Check - filter variables: [{name: waterTemperature}] - minvalue: -2.5 - maxvalue: 40.0 - - #------------------------------------------------------------------------------------ - ### Regional range tests - #------------------------------------------------------------------------------------ - - #### Red Sea - #------------------------------------------------------------------------------------ - #### - #### the document linked here describes Red sea as the are between 10N, 40E; - #### 20N, 50E; 30N, 30E; 10N, 40E. But that would also include Gulf of Aden. - #### A more reasonable choice seemed to be a box that extends from 10 N to - #### 30 N and 30 E to 45 East . 
- #------------------------------------------------------------------------------------ - - filter: Bounds Check - filter variables: [{name: waterTemperature}] - minvalue: 21.7 - maxvalue: 40.0 - where: - - variable: - name: MetaData/latitude - minvalue: 10 - maxvalue: 30 - - variable: - name: MetaData/longitude - minvalue: 30 - maxvalue: 45 - - #### Mediterranean Sea - #----------------------------------------------------------------------------- - ##### Area 1/3 for Mediterranean Sea - - filter: Bounds Check - filter variables: [{name: waterTemperature}] - minvalue: 10.0 - maxvalue: 40.0 - where: - - variable: - name: MetaData/latitude - minvalue: 30 - maxvalue: 40 - - variable: - name: MetaData/longitude - minvalue: -6 - maxvalue: 40 - ##### Area 2/3 for Mediterranean Sea - - filter: Bounds Check - filter variables: [{name: waterTemperature}] - minvalue: 10.0 - maxvalue: 40.0 - where: - - variable: - name: MetaData/latitude - minvalue: 40 - maxvalue: 41.5 - - variable: - name: MetaData/longitude - minvalue: 20 - maxvalue: 30 - ##### Area 3/3 for Mediterranean Sea - - filter: Bounds Check - filter variables: [{name: waterTemperature}] - minvalue: 10.0 - maxvalue: 40.0 - where: - - variable: - name: MetaData/latitude - minvalue: 40 - maxvalue: 46 - - variable: - name: MetaData/longitude - minvalue: 0 - maxvalue: 20 - - #### Northwestern shelves - #----------------------------------------------------------------------------- - - filter: Bounds Check - filter variables: [{name: waterTemperature}] - minvalue: -2.0 - maxvalue: 24.0 - where: - - variable: - name: MetaData/latitude - minvalue: 50 - maxvalue: 60 - - variable: - name: MetaData/longitude - minvalue: -20 - maxvalue: 10 - #### Southwestern shelves - #----------------------------------------------------------------------------- - - filter: Bounds Check - filter variables: [{name: waterTemperature}] - minvalue: -2.0 - maxvalue: 30 - where: - - variable: - name: MetaData/latitude - minvalue: 25 - maxvalue: 50 - - variable: - name: MetaData/longitude - minvalue: -30 - maxvalue: 0 - - #### Arctic Sea - #----------------------------------------------------------------------------- - - filter: Bounds Check - filter variables: [{name: waterTemperature}] - minvalue: -1.92 - maxvalue: 25.0 - where: - - variable: - name: MetaData/latitude - minvalue: 60 - - - filter: Background Check - filter variables: [{name: waterTemperature}] - threshold: 5.0 - absolute threshold: 5.0 - -# TODO: uncomment when there is data -# #------------------------------------------------------------------------------- -# ### Spike test -# #----------------------------------------------------------------------------- -# - filter: Create Diagnostic Flags -# filter variables: -# - name: waterTemperature -# # - name: salinity -# flags: -# - name: spike -# initial value: false -# - name: step -# initial value: false -# -# - filter: Spike and Step Check -# filter variables: -# - name: ObsValue/waterTemperature -# dependent: ObsValue/waterTemperature # dy/ -# independent: MetaData/depth # dx -# count spikes: true -# count steps: true -# tolerance: -# nominal value: 10 # K nominal, in the case of temperature (not really) -# gradient: 0.1 # K/m - if dy/dx greater, could be a spike -# gradient x resolution: 10 # m - can't know dx to better precision -# factors: [1,1,0.5] # multiply tolerance, for ranges bounded by... -# x boundaries: [0,500,500] # ...these values of x (depth in m) -# boundary layer: -# x range: [0.0,300.0] # when bounded by these x values (depth in m)... 
-# step tolerance range: [0.0,-2.0] # ...relax tolerance for steps in boundary layer... -# maximum x interval: [50.0,100.0] # ...and ignore level if dx greater than this -# action: -# name: reject -# -# #### Count spikes -# #----------------------------------------------------------------------------- -# - filter: Variable Assignment # create derived obs value containing only T spikes -# assignments: -# - name: DerivedMetaData/waterTemperature_spikes -# type: int -# function: -# name: IntObsFunction/ProfileLevelCount -# options: -# where: -# - variable: -# name: DiagnosticFlags/spike/waterTemperature -# value: is_true -# -# #### Count steps -# #----------------------------------------------------------------------------- -# - filter: Variable Assignment # create derived obs value containing only T steps -# assignments: -# - name: DerivedMetaData/waterTemperature_steps -# type: int -# function: -# name: IntObsFunction/ProfileLevelCount -# options: -# where: -# - variable: -# name: DiagnosticFlags/step/waterTemperature -# value: is_true -# #### Count total rejections -# #----------------------------------------------------------------------------- -# - filter: Variable Assignment # compute sum 2*spikes+steps -# assignments: -# - name: DerivedMetaData/waterTemperature_rejections -# type: int -# function: -# name: IntObsFunction/LinearCombination -# options: -# variables: [DerivedMetaData/waterTemperature_spikes, DerivedMetaData/waterTemperature_steps] -# coefs: [2,1] -# #### Reject entire profile if total rejctions > threshold -# #----------------------------------------------------------------------------- -# - filter: Perform Action # reject whole profile if 2*spikes+steps>=rejection threshold -# where: -# - variable: -# name: DerivedMetaData/waterTemperature_rejections -# minvalue: 3 -# action: -# name: reject -# diff --git a/parm/soca/obs/config/dev/insitu_profile_tesac_salinity.yaml b/parm/soca/obs/config/dev/insitu_profile_tesac_salinity.yaml deleted file mode 100644 index 30938849b..000000000 --- a/parm/soca/obs/config/dev/insitu_profile_tesac_salinity.yaml +++ /dev/null @@ -1,253 +0,0 @@ -obs space: - name: insitu_profile_tesac_salinity - obsdatain: - engine: - type: H5File - obsfile: !ENV ${DATA}/obs/${OPREFIX}insitu_profile_tesac.${PDY}${cyc}.nc4 - obsdataout: - engine: - type: H5File - obsfile: !ENV ${DATA}/diags/insitu_profile_tesac_salinity.${PDY}${cyc}.nc4 - simulated variables: [salinity] - #observed variables: [waterTemperature, salinity] - io pool: - max pool size: 1 -obs operator: -#---------------------------------------------------------------------------------------- -# composite obs operator is throwing an error looking for MetaData: air_pressure -# Hence the lines below and above are commented out -# name: Composite -# components: -# - name: InsituTemperature -# variables: -# - name: waterTemperature -# - name: VertInterp -# observation alias file: ./obsop_name_map.yaml -# variables: -# - name: salinity -# vertical coornidate: sea_water_depth -# observation vertical coordinate: depth -# interpolation method: linear -#--------------------------------------------------------------------------------------- - name: VertInterp - observation alias file: ./obsop_name_map.yaml - vertical coordinate: sea_water_depth - observation vertical coordinate: depth - interpolation method: linear -obs error: - covariance model: diagonal - -#-------------------------------------------------------------------------------------- -# START OF OBS FILTERS -# The QC filters used here are based 
on the document by IODE that can be found at -# https://cdn.ioos.noaa.gov/media/2017/12/recommendations_in_situ_data_real_time_qc.pdf -#-------------------------------------------------------------------------------------- -obs filters: - - # Land Check - - filter: Domain Check - where: - - variable: {name: GeoVaLs/sea_area_fraction} - minvalue: 0.5 - -#------------------------------------------------------------------------------- -## Filters for S: -#------------------------------------------------------------------------------- - #----------------------------------------------------------------------------- - ### Global range test - #----------------------------------------------------------------------------- - - filter: Bounds Check - filter variables: [{name: salinity}] - minvalue: 2.0 - maxvalue: 41.0 - - #----------------------------------------------------------------------------- - ### Regional range test - #----------------------------------------------------------------------------- - #### Red Sea - #----------------------------------------------------------------------------- - #### - #### the document linked here describes Red sea as the are between 10N, 40E; - #### 20N, 50E; 30N, 30E; 10N, 40E. But that would also include Gulf of Aden. - #### A more reasonable choice seemed to be a box that extends from 10 N to - #### 30 N and 30 E to 45 East . - - - filter: Bounds Check - filter variables: [{name: salinity}] - minvalue: 2.0 - maxvalue: 41.0 - where: - - variable: - name: MetaData/latitude - minvalue: 10 - maxvalue: 30 - - variable: - name: MetaData/longitude - minvalue: 30 - maxvalue: 45 - - #### Mediterranean Sea - #----------------------------------------------------------------------------- - ##### Area 1/3 for Mediterranean Sea - - filter: Bounds Check - filter variables: [{name: salinity}] - minvalue: 2.0 - maxvalue: 40.0 - where: - - variable: - name: MetaData/latitude - minvalue: 30 - maxvalue: 40 - - variable: - name: MetaData/longitude - minvalue: -6 - maxvalue: 40 - ##### Area 2/3 for Mediterranean Sea - - filter: Bounds Check - filter variables: [{name: salinity}] - minvalue: 2.0 - maxvalue: 40.0 - where: - - variable: - name: MetaData/latitude - minvalue: 40 - maxvalue: 41.5 - - variable: - name: MetaData/longitude - minvalue: 20 - maxvalue: 30 - ##### Area 3/3 for Mediterranean Sea - - filter: Bounds Check - filter variables: [{name: salinity}] - minvalue: 2.0 - maxvalue: 40.0 - where: - - variable: - name: MetaData/latitude - minvalue: 40 - maxvalue: 46 - - variable: - name: MetaData/longitude - minvalue: 0 - maxvalue: 20 - - - #### Northwestern shelves - #----------------------------------------------------------------------------- - - filter: Bounds Check - filter variables: [{name: salinity}] - minvalue: 0.0 - maxvalue: 37.0 - where: - - variable: - name: MetaData/latitude - minvalue: 50 - maxvalue: 60 - - variable: - name: MetaData/longitude - minvalue: -20 - maxvalue: 10 - - #### Southwestern shelves - #----------------------------------------------------------------------------- - - filter: Bounds Check - filter variables: [{name: salinity}] - minvalue: 0.0 - maxvalue: 38 - where: - - variable: - name: MetaData/latitude - minvalue: 25 - maxvalue: 50 - - variable: - name: MetaData/longitude - minvalue: -30 - maxvalue: 0 - - #### Arctic Sea - #----------------------------------------------------------------------------- - - filter: Bounds Check - filter variables: [{name: salinity}] - minvalue: 2.0 - maxvalue: 40.0 - where: - - variable: - name: 
MetaData/latitude - minvalue: 60 - - - filter: Background Check - filter variables: [{name: salinity}] - threshold: 5.0 - absolute threshold: 5.0 - -# TODO: uncomment when there is data -# #------------------------------------------------------------------------------- -# ### Spike test -# #----------------------------------------------------------------------------- -# - filter: Spike and Step Check -# filter variables: -# - name: ObsValue/salinity -# dependent: ObsValue/salinity # dy/ -# independent: MetaData/depth # dx -# count spikes: true -# count steps: true -# tolerance: -# nominal value: 1.0 # PSU nominal, in the case of salinity (not really) -# threshold: 0.6 # weird salinity thing -# factors: [1,1,0.2] # multiply tolerance, for ranges bounded by... -# x boundaries: [0,200,600] # ...these values of x (depth in m) -# boundary layer: -# x range: [0.0,300.0] # when bounded by these x values (depth in m)... -# maximum x interval: [50.0,100.0] # ...and ignore level if dx greater than this -# action: -# name: reject -# -# #### Count spikes -# #----------------------------------------------------------------------------- -# - filter: Variable Assignment # create derived obs value containing only S spikes -# assignments: -# - name: DerivedMetaData/salinity_spikes -# type: int -# function: -# name: IntObsFunction/ProfileLevelCount -# options: -# where: -# - variable: -# name: DiagnosticFlags/spike/salinity -# value: is_true -# #### Count steps -# #----------------------------------------------------------------------------- -# - filter: Variable Assignment # create derived obs value containing only S steps -# assignments: -# - name: DerivedMetaData/salinity_steps -# type: int -# function: -# name: IntObsFunction/ProfileLevelCount -# options: -# where: -# - variable: -# name: DiagnosticFlags/step/salinity -# value: is_true -# #### Count total rejections -# #----------------------------------------------------------------------------- -# - filter: Variable Assignment # compute sum 2*spikes+steps -# assignments: -# - name: DerivedMetaData/salinity_rejections -# type: int -# function: -# name: IntObsFunction/LinearCombination -# options: -# variables: [DerivedMetaData/salinity_spikes, DerivedMetaData/salinity_steps] -# coefs: [2,1] -# #### Reject entire profile if total rejctions > threshold -# #----------------------------------------------------------------------------- -# - filter: Perform Action # reject whole profile if 2*spikes+steps>=rejection threshold -# where: -# - variable: -# name: DerivedMetaData/salinity_rejections -# minvalue: 3 -# action: -# name: reject -# #------------------------------------------------------------------------------- -# ### End of Spike test -# #----------------------------------------------------------------------------- diff --git a/parm/soca/obs/config/dev/insitu_surface_trkob.yaml b/parm/soca/obs/config/dev/insitu_surface_trkob.yaml deleted file mode 100644 index 9e101e5c0..000000000 --- a/parm/soca/obs/config/dev/insitu_surface_trkob.yaml +++ /dev/null @@ -1,45 +0,0 @@ -obs space: - name: insitu_surface_trkob - obsdatain: - engine: - type: H5File - obsfile: !ENV ${DATA}/obs/${OPREFIX}insitu_surface_trkob.${PDY}${cyc}.nc4 - obsdataout: - engine: - type: H5File - obsfile: !ENV ${DATA}/diags/insitu_surface_trkob.${PDY}${cyc}.nc4 - simulated variables: [seaSurfaceTemperature] - io pool: - max pool size: 1 -obs operator: - name: Identity - observation alias file: ./obsop_name_map.yaml -obs error: - covariance model: diagonal - -obs filters: -- filter: Domain 
Check - where: - - variable: {name: GeoVaLs/sea_area_fraction} - minvalue: 0.9 -- filter: Bounds Check - minvalue: 1.0 - maxvalue: 41.0 -- filter: Background Check - threshold: 5.0 -- filter: Domain Check - where: - - variable: {name: ObsError/seaSurfaceTemperature} - minvalue: 1.0e-8 -- filter: Domain Check - where: - - variable: { name: GeoVaLs/sea_ice_area_fraction} - maxvalue: 1.0e-5 -- filter: Domain Check - where: - - variable: {name: GeoVaLs/sea_surface_temperature} - minvalue: -1.0 -- filter: Domain Check - where: - - variable: {name: GeoVaLs/distance_from_coast} - minvalue: 100e3 diff --git a/parm/soca/obs/config/insitu_profile_argo.yaml b/parm/soca/obs/config/insitu_profile_argo.yaml new file mode 100644 index 000000000..9b054931f --- /dev/null +++ b/parm/soca/obs/config/insitu_profile_argo.yaml @@ -0,0 +1,603 @@ +obs space: + name: insitu_profile_argo + obsdatain: + engine: + type: H5File + obsfile: !ENV ${DATA}/obs/${OPREFIX}insitu_profile_argo.${PDY}${cyc}.nc4 + obsgrouping: + group variables: [latitude, longitude, dateTime] + sort variable: depth + sort group: MetaData + sort order: ascending + obsdataout: + engine: + type: H5File + obsfile: !ENV ${DATA}/diags/insitu_profile_argo.${PDY}${cyc}.nc4 + simulated variables: [waterTemperature, salinity] + observed variables: [waterTemperature, salinity] + derived variables: [waterPressure] + io pool: + max pool size: 1 +obs operator: + name: Composite + components: + - name: InsituTemperature + variables: + - name: waterTemperature + - name: VertInterp + # note: alias file is needed for now because of a conflict with "waterTemperature". + # The built-in alias maps to "ocean_temperature", but soca uses "sea_water_temperature" + observation alias file: obsop_name_map.yaml + variables: + - name: salinity + vertical coordinate: sea_water_depth + observation vertical coordinate: depth + interpolation method: linear +obs error: + covariance model: diagonal + +#------------------------------------------------------------------------------- +# START OF OBS FILTERS (work done by Kriti Bhargava) +# The QC filters used here are based on the document by IODE that can be found at +# https://cdn.ioos.noaa.gov/media/2017/12/recommendations_in_situ_data_real_time_qc.pdf +#------------------------------------------------------------------------------- +obs filters: + + # land check + - filter: Domain Check + where: + - variable: {name: GeoVaLs/sea_area_fraction} + minvalue: 0.5 + +#------------------------------------------------------------------------------- +## Filters for T: +#------------------------------------------------------------------------------- + #------------------------------------------------------------------------------- + ### Global range test + #----------------------------------------------------------------------------- + - filter: Bounds Check + filter variables: [{name: waterTemperature}] + minvalue: -2.5 + maxvalue: 40.0 + + #----------------------------------------------------------------------------- + ### Regional range tests + #----------------------------------------------------------------------------- + + #### Red Sea + #----------------------------------------------------------------------------- + #### + #### the document linked here describes Red sea as the are between 10N, 40E; + #### 20N, 50E; 30N, 30E; 10N, 40E. But that would also include Gulf of Aden. + #### A more reasonable choice seemed to be a box that extends from 10 N to + #### 30 N and 30 E to 45 East . 
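+  #### (as configured below: inside the 10-30 N, 30-45 E box the accepted
+  ####  temperature range is tightened to 21.7-40.0 degC; outside it only
+  ####  the global -2.5 to 40.0 degC range test above applies.)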
+ + - filter: Bounds Check + filter variables: [{name: waterTemperature}] + minvalue: 21.7 + maxvalue: 40.0 + where: + - variable: + name: MetaData/latitude + minvalue: 10 + maxvalue: 30 + - variable: + name: MetaData/longitude + minvalue: 30 + maxvalue: 45 + + #### Mediterranean Sea + #----------------------------------------------------------------------------- + ##### Area 1/3 for Mediterranean Sea + - filter: Bounds Check + filter variables: [{name: waterTemperature}] + minvalue: 10.0 + maxvalue: 40.0 + where: + - variable: + name: MetaData/latitude + minvalue: 30 + maxvalue: 40 + - variable: + name: MetaData/longitude + minvalue: -6 + maxvalue: 40 + ##### Area 2/3 for Mediterranean Sea + - filter: Bounds Check + filter variables: [{name: waterTemperature}] + minvalue: 10.0 + maxvalue: 40.0 + where: + - variable: + name: MetaData/latitude + minvalue: 40 + maxvalue: 41.5 + - variable: + name: MetaData/longitude + minvalue: 20 + maxvalue: 30 + ##### Area 3/3 for Mediterranean Sea + - filter: Bounds Check + filter variables: [{name: waterTemperature}] + minvalue: 10.0 + maxvalue: 40.0 + where: + - variable: + name: MetaData/latitude + minvalue: 40 + maxvalue: 46 + - variable: + name: MetaData/longitude + minvalue: 0 + maxvalue: 20 + + #### Northwestern shelves + #----------------------------------------------------------------------------- + - filter: Bounds Check + filter variables: [{name: waterTemperature}] + minvalue: -2.0 + maxvalue: 24.0 + where: + - variable: + name: MetaData/latitude + minvalue: 50 + maxvalue: 60 + - variable: + name: MetaData/longitude + minvalue: -20 + maxvalue: 10 + #### Southwestern shelves + #----------------------------------------------------------------------------- + - filter: Bounds Check + filter variables: [{name: waterTemperature}] + minvalue: -2.0 + maxvalue: 30 + where: + - variable: + name: MetaData/latitude + minvalue: 25 + maxvalue: 50 + - variable: + name: MetaData/longitude + minvalue: -30 + maxvalue: 0 + + #### Arctic Sea + #----------------------------------------------------------------------------- + - filter: Bounds Check + filter variables: [{name: waterTemperature}] + minvalue: -1.92 + maxvalue: 25.0 + where: + - variable: + name: MetaData/latitude + minvalue: 60 + + - filter: Background Check + filter variables: [{name: waterTemperature}] + threshold: 5.0 + absolute threshold: 5.0 + + #------------------------------------------------------------------------------- + ### Spike test + #----------------------------------------------------------------------------- + - filter: Create Diagnostic Flags + filter variables: + - name: waterTemperature + - name: salinity + flags: + - name: spike + initial value: false + - name: step + initial value: false + + - filter: Spike and Step Check + filter variables: + - name: ObsValue/waterTemperature + dependent: ObsValue/waterTemperature # dy/ + independent: MetaData/depth # dx + count spikes: true + count steps: true + tolerance: + nominal value: 10 # K nominal, in the case of temperature (not really) + gradient: 0.1 # K/m - if dy/dx greater, could be a spike + gradient x resolution: 10 # m - can't know dx to better precision + factors: [1,1,0.5] # multiply tolerance, for ranges bounded by... + x boundaries: [0,500,500] # ...these values of x (depth in m) + boundary layer: + x range: [0.0,300.0] # when bounded by these x values (depth in m)... + step tolerance range: [0.0,-2.0] # ...relax tolerance for steps in boundary layer... 
+ maximum x interval: [50.0,100.0] # ...and ignore level if dx greater than this + action: + name: reject + + #### Count spikes + #----------------------------------------------------------------------------- + - filter: Variable Assignment # create derived obs value containing only T spikes + assignments: + - name: DerivedMetaData/waterTemperature_spikes + type: int + function: + name: IntObsFunction/ProfileLevelCount + options: + where: + - variable: + name: DiagnosticFlags/spike/waterTemperature + value: is_true + + #### Count steps + #----------------------------------------------------------------------------- + - filter: Variable Assignment # create derived obs value containing only T steps + assignments: + - name: DerivedMetaData/waterTemperature_steps + type: int + function: + name: IntObsFunction/ProfileLevelCount + options: + where: + - variable: + name: DiagnosticFlags/step/waterTemperature + value: is_true + #### Count total rejections + #----------------------------------------------------------------------------- + - filter: Variable Assignment # compute sum 2*spikes+steps + assignments: + - name: DerivedMetaData/waterTemperature_rejections + type: int + function: + name: IntObsFunction/LinearCombination + options: + variables: [DerivedMetaData/waterTemperature_spikes, DerivedMetaData/waterTemperature_steps] + coefs: [2,1] + #### Reject entire profile if total rejctions > threshold + #----------------------------------------------------------------------------- + - filter: Perform Action # reject whole profile if 2*spikes+steps>=rejection threshold + where: + - variable: + name: DerivedMetaData/waterTemperature_rejections + minvalue: 3 + action: + name: reject + +#------------------------------------------------------------------------------- +## Filters for S: +#------------------------------------------------------------------------------- + #----------------------------------------------------------------------------- + ### Global range test + #----------------------------------------------------------------------------- + - filter: Bounds Check + filter variables: [{name: salinity}] + minvalue: 2.0 + maxvalue: 41.0 + + #----------------------------------------------------------------------------- + ### Regional range test + #----------------------------------------------------------------------------- + #### Red Sea + #----------------------------------------------------------------------------- + #### + #### the document linked here describes Red sea as the are between 10N, 40E; + #### 20N, 50E; 30N, 30E; 10N, 40E. But that would also include Gulf of Aden. + #### A more reasonable choice seemed to be a box that extends from 10 N to + #### 30 N and 30 E to 45 East . 
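+  #### (note: the 2.0-41.0 PSU bounds applied inside this box below are
+  ####  identical to the global salinity range test above, so this regional
+  ####  check currently rejects nothing additional.)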
+ + - filter: Bounds Check + filter variables: [{name: salinity}] + minvalue: 2.0 + maxvalue: 41.0 + where: + - variable: + name: MetaData/latitude + minvalue: 10 + maxvalue: 30 + - variable: + name: MetaData/longitude + minvalue: 30 + maxvalue: 45 + + #### Mediterranean Sea + #----------------------------------------------------------------------------- + ##### Area 1/3 for Mediterranean Sea + - filter: Bounds Check + filter variables: [{name: salinity}] + minvalue: 2.0 + maxvalue: 40.0 + where: + - variable: + name: MetaData/latitude + minvalue: 30 + maxvalue: 40 + - variable: + name: MetaData/longitude + minvalue: -6 + maxvalue: 40 + ##### Area 2/3 for Mediterranean Sea + - filter: Bounds Check + filter variables: [{name: salinity}] + minvalue: 2.0 + maxvalue: 40.0 + where: + - variable: + name: MetaData/latitude + minvalue: 40 + maxvalue: 41.5 + - variable: + name: MetaData/longitude + minvalue: 20 + maxvalue: 30 + ##### Area 3/3 for Mediterranean Sea + - filter: Bounds Check + filter variables: [{name: salinity}] + minvalue: 2.0 + maxvalue: 40.0 + where: + - variable: + name: MetaData/latitude + minvalue: 40 + maxvalue: 46 + - variable: + name: MetaData/longitude + minvalue: 0 + maxvalue: 20 + + + #### Northwestern shelves + #----------------------------------------------------------------------------- + - filter: Bounds Check + filter variables: [{name: salinity}] + minvalue: 0.0 + maxvalue: 37.0 + where: + - variable: + name: MetaData/latitude + minvalue: 50 + maxvalue: 60 + - variable: + name: MetaData/longitude + minvalue: -20 + maxvalue: 10 + + #### Southwestern shelves + #----------------------------------------------------------------------------- + - filter: Bounds Check + filter variables: [{name: salinity}] + minvalue: 0.0 + maxvalue: 38 + where: + - variable: + name: MetaData/latitude + minvalue: 25 + maxvalue: 50 + - variable: + name: MetaData/longitude + minvalue: -30 + maxvalue: 0 + + #### Arctic Sea + #----------------------------------------------------------------------------- + - filter: Bounds Check + filter variables: [{name: salinity}] + minvalue: 2.0 + maxvalue: 40.0 + where: + - variable: + name: MetaData/latitude + minvalue: 60 + + - filter: Background Check + filter variables: [{name: salinity}] + threshold: 5.0 + absolute threshold: 5.0 + + #------------------------------------------------------------------------------- + ### Spike test + #----------------------------------------------------------------------------- + - filter: Spike and Step Check + filter variables: + - name: ObsValue/salinity + dependent: ObsValue/salinity # dy/ + independent: MetaData/depth # dx + count spikes: true + count steps: true + tolerance: + nominal value: 1.0 # PSU nominal, in the case of salinity (not really) + threshold: 0.6 # weird salinity thing + factors: [1,1,0.2] # multiply tolerance, for ranges bounded by... + x boundaries: [0,200,600] # ...these values of x (depth in m) + boundary layer: + x range: [0.0,300.0] # when bounded by these x values (depth in m)... 
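+      # (unlike the temperature spike/step check above, no 'step tolerance
+      #  range' relaxation is applied to salinity within the boundary layer)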
+ maximum x interval: [50.0,100.0] # ...and ignore level if dx greater than this + action: + name: reject + + #### Count spikes + #----------------------------------------------------------------------------- + - filter: Variable Assignment # create derived obs value containing only S spikes + assignments: + - name: DerivedMetaData/salinity_spikes + type: int + function: + name: IntObsFunction/ProfileLevelCount + options: + where: + - variable: + name: DiagnosticFlags/spike/salinity + value: is_true + #### Count steps + #----------------------------------------------------------------------------- + - filter: Variable Assignment # create derived obs value containing only S steps + assignments: + - name: DerivedMetaData/salinity_steps + type: int + function: + name: IntObsFunction/ProfileLevelCount + options: + where: + - variable: + name: DiagnosticFlags/step/salinity + value: is_true + #### Count total rejections + #----------------------------------------------------------------------------- + - filter: Variable Assignment # compute sum 2*spikes+steps + assignments: + - name: DerivedMetaData/salinity_rejections + type: int + function: + name: IntObsFunction/LinearCombination + options: + variables: [DerivedMetaData/salinity_spikes, DerivedMetaData/salinity_steps] + coefs: [2,1] + #### Reject entire profile if total rejctions > threshold + #----------------------------------------------------------------------------- + - filter: Perform Action # reject whole profile if 2*spikes+steps>=rejection threshold + where: + - variable: + name: DerivedMetaData/salinity_rejections + minvalue: 3 + action: + name: reject + #------------------------------------------------------------------------------- + ### End of Spike test + #----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +## Filters on the whole profile: +#----------------------------------------------------------------------------- + #------------------------------------------------------------------------------- + ### Ocean Vertical stability test + #----------------------------------------------------------------------------- + + #### get pressure from depth + #----------------------------------------------------------------------------- + - filter: Variable Transforms + Transform: OceanDepthToPressure + ocean depth variable: depth + ocean depth group: MetaData + #### Create diagonostic flags for spike step + #----------------------------------------------------------------------------- + - filter: Create Diagnostic Flags + filter variables: + - name: DerivedObsValue/waterPressure + flags: + - name: DensitySpike + initial value: false + - name: DensityStep + initial value: false + - name: Superadiabat + initial value: false + #### + #----------------------------------------------------------------------------- + - filter: Ocean Vertical Stability Check + where: + - variable: + name: ObsValue/waterTemperature + value: is_valid + filter variables: + - name: DerivedObsValue/waterPressure # density spikes/steps --> flag d + variables: + temperature: ObsValue/waterTemperature + salinity: ObsValue/salinity + pressure: DerivedObsValue/waterPressure + count spikes: true + count steps: true + nominal tolerance: -0.05 + threshold: 0.25 + actions: + - name: set + flag: Superadiabat + - name: reject + #### where there are any density inversions, reject temperature only: + 
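+  ####  (levels flagged Superadiabat by the Ocean Vertical Stability Check
+  ####   above have waterTemperature rejected here; the DensitySpike case
+  ####   that follows rejects both temperature and salinity.)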
#----------------------------------------------------------------------------- + - filter: Perform Action + filter variables: + - name: ObsValue/waterTemperature + where: + - variable: + name: DiagnosticFlags/Superadiabat/waterPressure + value: is_true + action: + name: reject + #### where density spikes, reject all vars (temperature and salinity): + #----------------------------------------------------------------------------- + - filter: Perform Action + filter variables: + - name: ObsValue/waterTemperature + - name: ObsValue/salinity + where: + - variable: + name: DiagnosticFlags/DensitySpike/waterPressure + value: is_true + action: + name: reject + #### create derived metadata counting levels: + #----------------------------------------------------------------------------- + - filter: Variable Assignment + assignments: + - name: DerivedMetaData/number_of_levels + type: int + function: + name: IntObsFunction/ProfileLevelCount + options: + where: + - variable: + name: ObsValue/waterTemperature + value: is_valid + #### create derived metadata counting spikes and steps: + #----------------------------------------------------------------------------- + - filter: Variable Assignment + assignments: + - name: DerivedMetaData/ocean_density_inversions + type: int + function: + name: IntObsFunction/ProfileLevelCount + options: + where: + - variable: + name: DiagnosticFlags/DensitySpike/waterPressure + value: is_true + - variable: + name: DiagnosticFlags/DensityStep/waterPressure + value: is_true + where operator: or + + #### whole profile is rejected if spikes+steps >= numlev/4, so compute + #### 4*( sum spikes+steps ) minus numlev + #### in order to check it against 0: + #----------------------------------------------------------------------------- + - filter: Variable Assignment + assignments: + - name: DerivedMetaData/ocean_density_rejections + type: int + function: + name: IntObsFunction/LinearCombination + options: + variables: [DerivedMetaData/ocean_density_inversions, DerivedMetaData/number_of_levels] + coefs: [4, -1] + #### reject whole profile if spikes+steps >= numlev/4 AND >= 2: + #----------------------------------------------------------------------------- + - filter: Perform Action + filter variables: + - name: ObsValue/waterTemperature + - name: ObsValue/salinity + where: + - variable: + name: DerivedMetaData/ocean_density_rejections + minvalue: 0 + - variable: + name: DerivedMetaData/ocean_density_inversions + minvalue: 2 + where operator: and + action: + name: reject + #------------------------------------------------------------------------------- + ### End of ocean vertical stability test + #----------------------------------------------------------------------------- + + #----------------------------------------------------------------------------- + ### If T was bad, remove S as well regardless + #----------------------------------------------------------------------------- + - filter: RejectList + where: + - variable: QCflagsData/waterTemperature + minvalue: 1 + defer to post: true diff --git a/parm/soca/obs/config/insitu_profile_bathy.yaml b/parm/soca/obs/config/insitu_profile_bathy.yaml new file mode 100644 index 000000000..8742f44fc --- /dev/null +++ b/parm/soca/obs/config/insitu_profile_bathy.yaml @@ -0,0 +1,17 @@ +obs space: + name: insitu_profile_bathy + obsdatain: + engine: + type: H5File + obsfile: !ENV ${DATA}/obs/${OPREFIX}insitu_profile_bathy.${PDY}${cyc}.nc4 + obsdataout: + engine: + type: H5File + obsfile: !ENV ${DATA}/diags/insitu_profile_bathy.${PDY}${cyc}.nc4 
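+      # (the !ENV obsfile paths above are expected to be expanded from the
+      #  runtime environment; DATA, OPREFIX, PDY and cyc must be set by the
+      #  calling workflow before this config is parsed)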
+ simulated variables: [waterTemperature] + io pool: + max pool size: 1 +obs operator: + name: InsituTemperature +obs error: + covariance model: diagonal diff --git a/parm/soca/obs/config/dev/insitu_profile_dbuoy.yaml b/parm/soca/obs/config/insitu_profile_dbuoy.yaml similarity index 82% rename from parm/soca/obs/config/dev/insitu_profile_dbuoy.yaml rename to parm/soca/obs/config/insitu_profile_dbuoy.yaml index 3ab948b9d..5f81902e2 100644 --- a/parm/soca/obs/config/dev/insitu_profile_dbuoy.yaml +++ b/parm/soca/obs/config/insitu_profile_dbuoy.yaml @@ -8,8 +8,8 @@ obs space: engine: type: H5File obsfile: !ENV ${DATA}/diags/insitu_profile_dbuoy.${PDY}${cyc}.nc4 - simulated variables: [waterTemperature, salinity] - observed variables: [waterTemperature, salinity] + simulated variables: [waterTemperature] + observed variables: [waterTemperature] io pool: max pool size: 1 obs operator: diff --git a/parm/soca/obs/config/dev/insitu_profile_dbuoyb.yaml b/parm/soca/obs/config/insitu_profile_dbuoyb.yaml similarity index 82% rename from parm/soca/obs/config/dev/insitu_profile_dbuoyb.yaml rename to parm/soca/obs/config/insitu_profile_dbuoyb.yaml index cc11a33df..29052312a 100644 --- a/parm/soca/obs/config/dev/insitu_profile_dbuoyb.yaml +++ b/parm/soca/obs/config/insitu_profile_dbuoyb.yaml @@ -8,8 +8,8 @@ obs space: engine: type: H5File obsfile: !ENV ${DATA}/diags/insitu_profile_dbuoyb.${PDY}${cyc}.nc4 - simulated variables: [waterTemperature, salinity] - observed variables: [waterTemperature, salinity] + simulated variables: [waterTemperature] + observed variables: [waterTemperature] io pool: max pool size: 1 obs operator: diff --git a/parm/soca/obs/config/dev/insitu_profile_glider.yaml b/parm/soca/obs/config/insitu_profile_glider.yaml similarity index 82% rename from parm/soca/obs/config/dev/insitu_profile_glider.yaml rename to parm/soca/obs/config/insitu_profile_glider.yaml index 8d92d3630..17242b585 100644 --- a/parm/soca/obs/config/dev/insitu_profile_glider.yaml +++ b/parm/soca/obs/config/insitu_profile_glider.yaml @@ -8,8 +8,8 @@ obs space: engine: type: H5File obsfile: !ENV ${DATA}/diags/insitu_profile_glider.${PDY}${cyc}.nc4 - simulated variables: [waterTemperature, salinity] - observed variables: [waterTemperature, salinity] + simulated variables: [waterTemperature] + observed variables: [waterTemperature] io pool: max pool size: 1 obs operator: diff --git a/parm/soca/obs/config/dev/insitu_profile_marinemammal.yaml b/parm/soca/obs/config/insitu_profile_marinemammal.yaml similarity index 82% rename from parm/soca/obs/config/dev/insitu_profile_marinemammal.yaml rename to parm/soca/obs/config/insitu_profile_marinemammal.yaml index 70a3aefe2..682b02f0a 100644 --- a/parm/soca/obs/config/dev/insitu_profile_marinemammal.yaml +++ b/parm/soca/obs/config/insitu_profile_marinemammal.yaml @@ -8,8 +8,8 @@ obs space: engine: type: H5File obsfile: !ENV ${DATA}/diags/insitu_profile_marinemammal.${PDY}${cyc}.nc4 - simulated variables: [waterTemperature, salinity] - observed variables: [waterTemperature, salinity] + simulated variables: [waterTemperature] + observed variables: [waterTemperature] io pool: max pool size: 1 obs operator: diff --git a/parm/soca/obs/config/dev/insitu_profile_mbuoy.yaml b/parm/soca/obs/config/insitu_profile_mbuoy.yaml similarity index 82% rename from parm/soca/obs/config/dev/insitu_profile_mbuoy.yaml rename to parm/soca/obs/config/insitu_profile_mbuoy.yaml index e5e831084..c2b2ca614 100644 --- a/parm/soca/obs/config/dev/insitu_profile_mbuoy.yaml +++ 
b/parm/soca/obs/config/insitu_profile_mbuoy.yaml @@ -8,8 +8,8 @@ obs space: engine: type: H5File obsfile: !ENV ${DATA}/diags/insitu_profile_mbuoy.${PDY}${cyc}.nc4 - simulated variables: [waterTemperature, salinity] - observed variables: [waterTemperature, salinity] + simulated variables: [waterTemperature] + observed variables: [waterTemperature] io pool: max pool size: 1 obs operator: diff --git a/parm/soca/obs/config/dev/insitu_profile_mbuoyb.yaml b/parm/soca/obs/config/insitu_profile_mbuoyb.yaml similarity index 82% rename from parm/soca/obs/config/dev/insitu_profile_mbuoyb.yaml rename to parm/soca/obs/config/insitu_profile_mbuoyb.yaml index 03f730aee..f8ad55e6d 100644 --- a/parm/soca/obs/config/dev/insitu_profile_mbuoyb.yaml +++ b/parm/soca/obs/config/insitu_profile_mbuoyb.yaml @@ -8,8 +8,8 @@ obs space: engine: type: H5File obsfile: !ENV ${DATA}/diags/insitu_profile_mbuoyb.${PDY}${cyc}.nc4 - simulated variables: [waterTemperature, salinity] - observed variables: [waterTemperature, salinity] + simulated variables: [waterTemperature] + observed variables: [waterTemperature] io pool: max pool size: 1 obs operator: diff --git a/parm/soca/obs/config/insitu_profile_tesac.yaml b/parm/soca/obs/config/insitu_profile_tesac.yaml new file mode 100644 index 000000000..bc8342dc8 --- /dev/null +++ b/parm/soca/obs/config/insitu_profile_tesac.yaml @@ -0,0 +1,20 @@ +obs space: + name: insitu_profile_tesac + obsdatain: + engine: + type: H5File + obsfile: !ENV ${DATA}/obs/${OPREFIX}insitu_profile_tesac.${PDY}${cyc}.nc4 + obsdataout: + engine: + type: H5File + obsfile: !ENV ${DATA}/diags/insitu_profile_tesac.${PDY}${cyc}.nc4 + simulated variables: [waterTemperature] + observed variables: [waterTemperature] + io pool: + max pool size: 1 +obs operator: + name: Composite + components: + - name: InsituTemperature + variables: + - name: waterTemperature diff --git a/parm/soca/obs/config/insitu_profile_tesac_salinity.yaml b/parm/soca/obs/config/insitu_profile_tesac_salinity.yaml new file mode 100644 index 000000000..9190e46ef --- /dev/null +++ b/parm/soca/obs/config/insitu_profile_tesac_salinity.yaml @@ -0,0 +1,22 @@ +obs space: + name: insitu_profile_tesac_salinity + obsdatain: + engine: + type: H5File + obsfile: !ENV ${DATA}/obs/${OPREFIX}insitu_profile_tesac.${PDY}${cyc}.nc4 + obsdataout: + engine: + type: H5File + obsfile: !ENV ${DATA}/diags/insitu_profile_tesac_salinity.${PDY}${cyc}.nc4 + simulated variables: [salinity] + observed variables: [salinity] + io pool: + max pool size: 1 +obs operator: + name: VertInterp + observation alias file: ./obsop_name_map.yaml + vertical coordinate: sea_water_depth + observation vertical coordinate: depth + interpolation method: linear +obs error: + covariance model: diagonal diff --git a/parm/soca/obs/config/dev/insitu_profile_xbtctd.yaml b/parm/soca/obs/config/insitu_profile_xbtctd.yaml similarity index 82% rename from parm/soca/obs/config/dev/insitu_profile_xbtctd.yaml rename to parm/soca/obs/config/insitu_profile_xbtctd.yaml index 148f9b3d7..a868fc8c7 100644 --- a/parm/soca/obs/config/dev/insitu_profile_xbtctd.yaml +++ b/parm/soca/obs/config/insitu_profile_xbtctd.yaml @@ -8,8 +8,8 @@ obs space: engine: type: H5File obsfile: !ENV ${DATA}/diags/insitu_profile_xbtctd.${PDY}${cyc}.nc4 - simulated variables: [waterTemperature, salinity] - observed variables: [waterTemperature, salinity] + simulated variables: [waterTemperature] + observed variables: [waterTemperature] io pool: max pool size: 1 obs operator: diff --git a/parm/soca/obs/config/dev/insitu_surface_altkob.yaml 
b/parm/soca/obs/config/insitu_surface_altkob.yaml similarity index 100% rename from parm/soca/obs/config/dev/insitu_surface_altkob.yaml rename to parm/soca/obs/config/insitu_surface_altkob.yaml diff --git a/parm/soca/obs/config/insitu_surface_trkob.yaml b/parm/soca/obs/config/insitu_surface_trkob.yaml new file mode 100644 index 000000000..e35230223 --- /dev/null +++ b/parm/soca/obs/config/insitu_surface_trkob.yaml @@ -0,0 +1,18 @@ +obs space: + name: insitu_surface_trkob + obsdatain: + engine: + type: H5File + obsfile: !ENV ${DATA}/obs/${OPREFIX}insitu_surface_trkob.${PDY}${cyc}.nc4 + obsdataout: + engine: + type: H5File + obsfile: !ENV ${DATA}/diags/insitu_surface_trkob.${PDY}${cyc}.nc4 + simulated variables: [seaSurfaceTemperature] + io pool: + max pool size: 1 +obs operator: + name: Identity + observation alias file: ./obsop_name_map.yaml +obs error: + covariance model: diagonal diff --git a/parm/soca/obs/config/dev/insitu_surface_trkob_salinity.yaml b/parm/soca/obs/config/insitu_surface_trkob_salinity.yaml similarity index 100% rename from parm/soca/obs/config/dev/insitu_surface_trkob_salinity.yaml rename to parm/soca/obs/config/insitu_surface_trkob_salinity.yaml diff --git a/parm/soca/obs/obs_list.yaml b/parm/soca/obs/obs_list.yaml index 7a367c98f..f7be24da5 100644 --- a/parm/soca/obs/obs_list.yaml +++ b/parm/soca/obs/obs_list.yaml @@ -3,30 +3,30 @@ observers: - !INC ${OBS_YAML_DIR}/adt_rads_all.yaml # SST -#- !INC ${OBS_YAML_DIR}/sst_avhrr_ma_l3u.yaml -#- !INC ${OBS_YAML_DIR}/sst_avhrr_mb_l3u.yaml -#- !INC ${OBS_YAML_DIR}/sst_avhrr_mc_l3u.yaml -#- !INC ${OBS_YAML_DIR}/sst_viirs_npp_l3u.yaml -#- !INC ${OBS_YAML_DIR}/sst_viirs_n20_l3u.yaml -#- !INC ${OBS_YAML_DIR}/sst_abi_g16_l3c.yaml -#- !INC ${OBS_YAML_DIR}/sst_abi_g17_l3c.yaml -#- !INC ${OBS_YAML_DIR}/sst_ahi_h08_l3c.yaml +- !INC ${OBS_YAML_DIR}/sst_avhrr_ma_l3u.yaml +- !INC ${OBS_YAML_DIR}/sst_avhrr_mb_l3u.yaml +- !INC ${OBS_YAML_DIR}/sst_avhrr_mc_l3u.yaml +- !INC ${OBS_YAML_DIR}/sst_viirs_npp_l3u.yaml +- !INC ${OBS_YAML_DIR}/sst_viirs_n20_l3u.yaml +- !INC ${OBS_YAML_DIR}/sst_abi_g16_l3c.yaml +- !INC ${OBS_YAML_DIR}/sst_abi_g17_l3c.yaml +- !INC ${OBS_YAML_DIR}/sst_ahi_h08_l3c.yaml # Ice concentration - !INC ${OBS_YAML_DIR}/icec_amsr2_north.yaml - !INC ${OBS_YAML_DIR}/icec_amsr2_south.yaml # in situ: monthly -#- !INC ${OBS_YAML_DIR}/insitu_profile_bathy.yaml -#- !INC ${OBS_YAML_DIR}/insitu_profile_argo.yaml -#- !INC ${OBS_YAML_DIR}/insitu_profile_glider.yaml -#- !INC ${OBS_YAML_DIR}/insitu_profile_tesac.yaml -#- !INC ${OBS_YAML_DIR}/insitu_profile_tesac_salinity.yaml -#- !INC ${OBS_YAML_DIR}/insitu_profile_marinemammal.yaml -#- !INC ${OBS_YAML_DIR}/insitu_profile_xbtctd.yaml -#- !INC ${OBS_YAML_DIR}/insitu_surface_altkob.yaml -#- !INC ${OBS_YAML_DIR}/insitu_surface_trkob.yaml -#- !INC ${OBS_YAML_DIR}/insitu_surface_trkob_salinity.yaml +- !INC ${OBS_YAML_DIR}/insitu_profile_bathy.yaml +- !INC ${OBS_YAML_DIR}/insitu_profile_argo.yaml +- !INC ${OBS_YAML_DIR}/insitu_profile_glider.yaml +- !INC ${OBS_YAML_DIR}/insitu_profile_tesac.yaml +- !INC ${OBS_YAML_DIR}/insitu_profile_tesac_salinity.yaml +- !INC ${OBS_YAML_DIR}/insitu_profile_marinemammal.yaml +- !INC ${OBS_YAML_DIR}/insitu_profile_xbtctd.yaml +- !INC ${OBS_YAML_DIR}/insitu_surface_altkob.yaml +- !INC ${OBS_YAML_DIR}/insitu_surface_trkob.yaml +- !INC ${OBS_YAML_DIR}/insitu_surface_trkob_salinity.yaml # in situ: daily #- !INC ${OBS_YAML_DIR}/insitu_profile_dbuoy.yaml diff --git a/scripts/exgdas_global_marine_analysis_vrfy.py b/scripts/exgdas_global_marine_analysis_vrfy.py index 
2b4d0b1d7..36d00bdb6 100755 --- a/scripts/exgdas_global_marine_analysis_vrfy.py +++ b/scripts/exgdas_global_marine_analysis_vrfy.py @@ -125,7 +125,7 @@ def plot_marine_vrfy(config): projs=['North', 'South', 'Global'], comout=os.path.join(comout, 'vrfy', 'ana')), # sea ice analysis plotConfig(grid_file=grid_file, - data_file=os.path.join(com_ice_history, f'{RUN}.t{gcyc}z.icef006.nc'), + data_file=os.path.join(com_ice_history, f'{RUN}.ice.t{gcyc}z.inst.f006.nc'), variables_horiz={'aice_h': [0.0, 1.0], 'hs_h': [0.0, 4.0], 'hi_h': [0.0, 0.5]}, @@ -140,7 +140,7 @@ def plot_marine_vrfy(config): colormap='jet', comout=os.path.join(comout, 'vrfy', 'ana')), # ocean surface analysis plotConfig(grid_file=grid_file, - data_file=os.path.join(com_ocean_history, f'{RUN}.t{gcyc}z.ocnf006.nc'), + data_file=os.path.join(com_ocean_history, f'{RUN}.ocean.t{gcyc}z.inst.f006.nc'), variables_horiz={'ave_ssh': [-1.8, 1.3], 'Temp': [-1.8, 34.0], 'Salt': [30, 38]}, diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt index 86b53a3f1..c2fdf7b81 100644 --- a/test/CMakeLists.txt +++ b/test/CMakeLists.txt @@ -134,19 +134,15 @@ if (WORKFLOW_TESTS) WORKING_DIRECTORY ${PROJECT_BINARY_DIR}/test/testrun) endif() -# fv3jedi tests if (${BUILD_GDASBUNDLE}) - add_subdirectory(fv3jedi) -endif() + add_subdirectory(fv3jedi) # fv3jedi tests + add_subdirectory(soca) # soca tests + add_subdirectory(snow) # snow da tests + option(RUN_GW_CI "Enable the global-workflow CI tests" OFF) + if (RUN_GW_CI) + add_subdirectory(gw-ci) # subset of the global-workflow ci tests + endif() -# soca tests -if (${BUILD_GDASBUNDLE}) - add_subdirectory(soca) -endif() - -# snow da tests -if (${BUILD_GDASBUNDLE}) - add_subdirectory(snow) endif() # gdas atm tests diff --git a/test/atm/global-workflow/3dvar.ref b/test/atm/global-workflow/3dvar.ref new file mode 100644 index 000000000..e5de4f09d --- /dev/null +++ b/test/atm/global-workflow/3dvar.ref @@ -0,0 +1,94 @@ +CostJb : Nonlinear Jb = 0.0000000000000000e+00 +CostJo : Nonlinear Jo(AMSUA N19) = 3.8984270736441467e+04, nobs = 74275, Jo/n = 5.2486396144653613e-01, err = 6.3650648936327991e+00 +CostJo : Nonlinear Jo(sondes) = 1.0738490911289638e+04, nobs = 4255, Jo/n = 2.5237346442513839e+00, err = 1.0981085630539836e+01 +CostFunction: Nonlinear J = 4.9722761647731109e+04 +DRPCGMinimizer: reduction in residual norm = 9.6484392730339263e-01 +CostFunction::addIncrement: Analysis: +---------------------------------------------------------------------------------------------------- +State print | number of fields = 22 | cube sphere face size: C48 +eastward_wind | Min:-5.5495644220059390e+01 Max:+8.4934651204487366e+01 RMS:+1.6388250288534099e+01 +northward_wind | Min:-7.3712421229093422e+01 Max:+7.6984825502186652e+01 RMS:+8.1424144891570887e+00 +air_temperature | Min:+1.7482158077318462e+02 Max:+3.1467235233685437e+02 RMS:+2.4978424883089124e+02 +air_pressure_thickness | Min:+6.0599999874109178e-01 Max:+1.7760098554198755e+03 RMS:+9.5680601590204810e+02 +surface_pressure | Min:+5.3297486514404744e+04 Max:+1.0397927292902800e+05 RMS:+9.8896232762674656e+04 +specific_humidity | Min:+0.0000000000000000e+00 Max:+2.0041369334954085e-02 RMS:+4.7792636981582193e-03 +cloud_liquid_ice | Min:+0.0000000000000000e+00 Max:+1.2024392024662985e-03 RMS:+1.4396968866569995e-05 +cloud_liquid_water | Min:+0.0000000000000000e+00 Max:+1.5935686618789048e-03 RMS:+4.1931345590543333e-05 +ozone_mass_mixing_ratio | Min:+1.1998327271379204e-08 Max:+1.7808431790670831e-05 RMS:+4.4947917149861064e-06 +sfc_geopotential_height_times_grav | 
Min:-2.6495993813133236e+02 Max:+5.1678555530273436e+04 RMS:+6.1813840931281156e+03 +slmsk | Min:+0.0000000000000000e+00 Max:+2.0000000000000000e+00 RMS:+7.3450427349184144e-01 +sheleg | Min:+0.0000000000000000e+00 Max:+1.8650088674622128e+02 RMS:+2.2457791153397761e+01 +sea_surface_temperature | Min:+2.1068090851678403e+02 Max:+3.3125178729099270e+02 RMS:+2.8949442340676961e+02 +vtype | Min:+0.0000000000000000e+00 Max:+2.0000000000000000e+01 RMS:+6.5214489858466269e+00 +stype | Min:+0.0000000000000000e+00 Max:+1.6000000000000000e+01 RMS:+4.5380018343041852e+00 +vfrac | Min:+0.0000000000000000e+00 Max:+9.7215068788150627e-01 RMS:+2.2249321391025326e-01 +stc | Min:+2.1550390656839755e+02 Max:+3.2609885436959308e+02 RMS:+2.8945169639231835e+02 +soilMoistureVolumetric | Min:+1.1321643448483208e-02 Max:+1.0000000000000000e+00 RMS:+8.3306585674111977e-01 +totalSnowDepth | Min:+0.0000000000000000e+00 Max:+1.7098998490425442e+03 RMS:+1.8885640927654018e+02 +surface_eastward_wind | Min:-1.6966302087131233e+01 Max:+2.1273647117120184e+01 RMS:+5.4892133439475188e+00 +surface_northward_wind | Min:-1.7367330522224339e+01 Max:+2.2091894088184446e+01 RMS:+4.4723535847538756e+00 +f10m | Min:+9.4613586695410434e-01 Max:+1.0695628272426509e+00 RMS:+9.9526981313535046e-01 +---------------------------------------------------------------------------------------------------- + +Obs bias coefficients: +--------------------------------------------------------------- + constant: Min= -1.2490630149841309, Max= 1.6242779492736057, Norm= 3.1216962856027304 + lapse_rate_order_2: Min= -6.0771207806339094, Max= 52.3767318715110477, Norm= 52.7793275314599910 + lapse_rate: Min= -6.1173920629465659, Max= 0.4750269950800435, Norm= 6.1658157312728985 + emissivity: Min= -0.2395000010728836, Max= 10.5581263135712575, Norm= 10.5655898122368264 + scan_angle_order_4: Min= -7.9506011008096911, Max= 2.3034050466999081, Norm= 13.1727465012547444 + scan_angle_order_3: Min= -1.1903680560584049, Max= 2.3802189822803275, Norm= 3.1970135720803450 + scan_angle_order_2: Min= -5.1212630271747548, Max= 2.9977130889532191, Norm= 7.3569496454063597 + scan_angle: Min= -0.6766870022405442, Max= 0.0732169970537269, Norm= 1.1485053152110289 +--------------------------------------------------------------- + + +CostJb : Nonlinear Jb = 0.0000003263133461 +CostJo : Nonlinear Jo(AMSUA N19) = 38981.7238460305161425, nobs = 74275, Jo/n = 0.5248296714376374, err = 6.3650648936327991 +CostJo : Nonlinear Jo(sondes) = 10687.2756506645455374, nobs = 4255, Jo/n = 2.5116981552678133, err = 10.9810856305398357 +CostFunction: Nonlinear J = 49668.9994970213738270 +DRPCGMinimizer: reduction in residual norm = 1.0147575953754833 +CostFunction::addIncrement: Analysis: +---------------------------------------------------------------------------------------------------- +State print | number of fields = 22 | cube sphere face size: C48 +eastward_wind | Min:-5.5495644220059390e+01 Max:+8.4934651204487366e+01 RMS:+1.6388250288542711e+01 +northward_wind | Min:-7.3712421229093422e+01 Max:+7.6984825502186652e+01 RMS:+8.1424144891586359e+00 +air_temperature | Min:+1.7482158077318311e+02 Max:+3.1467235233685437e+02 RMS:+2.4978424883200961e+02 +air_pressure_thickness | Min:+6.0599999874109178e-01 Max:+1.7760098554198755e+03 RMS:+9.5680601590204810e+02 +surface_pressure | Min:+5.3297486514404744e+04 Max:+1.0397927292902800e+05 RMS:+9.8896232762674656e+04 +specific_humidity | Min:+0.0000000000000000e+00 Max:+2.0041369334954085e-02 RMS:+4.7792638362681448e-03 +cloud_liquid_ice | 
Min:+0.0000000000000000e+00 Max:+1.2024392024662985e-03 RMS:+1.4396968866569995e-05 +cloud_liquid_water | Min:+0.0000000000000000e+00 Max:+1.5935686618789048e-03 RMS:+4.1931345590543333e-05 +ozone_mass_mixing_ratio | Min:+1.1998327271379204e-08 Max:+1.7808431790670831e-05 RMS:+4.4947917149861064e-06 +sfc_geopotential_height_times_grav | Min:-2.6495993813133236e+02 Max:+5.1678555530273436e+04 RMS:+6.1813840931281156e+03 +slmsk | Min:+0.0000000000000000e+00 Max:+2.0000000000000000e+00 RMS:+7.3450427349184144e-01 +sheleg | Min:+0.0000000000000000e+00 Max:+1.8650088674622128e+02 RMS:+2.2457791153397761e+01 +sea_surface_temperature | Min:+2.1068090851678403e+02 Max:+3.3125178729099270e+02 RMS:+2.8949442340676961e+02 +vtype | Min:+0.0000000000000000e+00 Max:+2.0000000000000000e+01 RMS:+6.5214489858466269e+00 +stype | Min:+0.0000000000000000e+00 Max:+1.6000000000000000e+01 RMS:+4.5380018343041852e+00 +vfrac | Min:+0.0000000000000000e+00 Max:+9.7215068788150627e-01 RMS:+2.2249321391025326e-01 +stc | Min:+2.1550390656839755e+02 Max:+3.2609885436959308e+02 RMS:+2.8945169639231835e+02 +soilMoistureVolumetric | Min:+1.1321643448483208e-02 Max:+1.0000000000000000e+00 RMS:+8.3306585674111977e-01 +totalSnowDepth | Min:+0.0000000000000000e+00 Max:+1.7098998490425442e+03 RMS:+1.8885640927654018e+02 +surface_eastward_wind | Min:-1.6966302087131233e+01 Max:+2.1273647117120184e+01 RMS:+5.4892133439475188e+00 +surface_northward_wind | Min:-1.7367330522224339e+01 Max:+2.2091894088184446e+01 RMS:+4.4723535847538756e+00 +f10m | Min:+9.4613586695410434e-01 Max:+1.0695628272426509e+00 RMS:+9.9526981313535046e-01 +---------------------------------------------------------------------------------------------------- + +Obs bias coefficients: +--------------------------------------------------------------- + constant: Min= -1.2490630149841309, Max= 1.6242779490057870, Norm= 3.1216962826704004 + lapse_rate_order_2: Min= -6.0771207792168234, Max= 52.3767318668067006, Norm= 52.7793275266354911 + lapse_rate: Min= -6.1173920620497713, Max= 0.4750269957036114, Norm= 6.1658157301802641 + emissivity: Min= -0.2395000010728836, Max= 10.5581256964462273, Norm= 10.5655891955375107 + scan_angle_order_4: Min= -7.9506011003013111, Max= 2.3034050477855654, Norm= 13.1727464989080296 + scan_angle_order_3: Min= -1.1903680549739593, Max= 2.3802189803928178, Norm= 3.1970135704212637 + scan_angle_order_2: Min= -5.1212630270944297, Max= 2.9977130887912211, Norm= 7.3569496444930564 + scan_angle: Min= -0.6766870025059790, Max= 0.0732169969027750, Norm= 1.1485053159201970 +--------------------------------------------------------------- + + +CostJb : Nonlinear Jb = 0.0000033688688972 +CostJo : Nonlinear Jo(AMSUA N19) = 38970.3026556682743831, nobs = 74275, Jo/n = 0.5246759024660824, err = 6.3650648936327991 +CostJo : Nonlinear Jo(sondes) = 10639.1209516101644112, nobs = 4255, Jo/n = 2.5003809521998037, err = 10.9810856305398357 +CostFunction: Nonlinear J = 49609.4236106473108521 diff --git a/test/atm/global-workflow/jcb-prototype_3dvar.yaml.j2 b/test/atm/global-workflow/jcb-prototype_3dvar.yaml.j2 index cc6d01446..9ea2debc3 100644 --- a/test/atm/global-workflow/jcb-prototype_3dvar.yaml.j2 +++ b/test/atm/global-workflow/jcb-prototype_3dvar.yaml.j2 @@ -14,3 +14,10 @@ observations: # The observation files in the testing are appended using the yyymmddhh similar to JEDI tests atmosphere_obsdatain_suffix: ".{{ current_cycle | to_YMDH }}.nc" + +# Testing things +# -------------- +test_reference_filename: {{ HOMEgfs 
}}/sorc/gdas.cd/test/atm/global-workflow/3dvar.ref +test_output_filename: ./3dvar.out +float_relative_tolerance: 1.0e-3 +float_absolute_tolerance: 1.0e-5 diff --git a/test/atm/global-workflow/jcb-prototype_lgetkf.yaml.j2 b/test/atm/global-workflow/jcb-prototype_lgetkf.yaml.j2 index 0ca12c18a..201d042e1 100644 --- a/test/atm/global-workflow/jcb-prototype_lgetkf.yaml.j2 +++ b/test/atm/global-workflow/jcb-prototype_lgetkf.yaml.j2 @@ -19,3 +19,10 @@ observations: # The observation files in the testing are appended using the yyymmddhh similar to JEDI tests atmosphere_obsdatain_suffix: ".{{ current_cycle | to_YMDH }}.nc" + +# Testing things +# -------------- +test_reference_filename: {{ HOMEgfs }}/sorc/gdas.cd/test/atm/global-workflow/lgetkf.ref +test_output_filename: ./lgetkf.out +float_relative_tolerance: 1.0e-3 +float_absolute_tolerance: 1.0e-5 diff --git a/test/atm/global-workflow/lgetkf.ref b/test/atm/global-workflow/lgetkf.ref new file mode 100644 index 000000000..506c0c813 --- /dev/null +++ b/test/atm/global-workflow/lgetkf.ref @@ -0,0 +1,197 @@ +Initial state for member 1: +---------------------------------------------------------------------------------------------------- +State print | number of fields = 23 | cube sphere face size: C48 +eastward_wind | Min:-5.1619864857418477e+01 Max:+8.6812084442971653e+01 RMS:+1.5983411875766325e+01 +northward_wind | Min:-7.3094846805319690e+01 Max:+7.0353817207582651e+01 RMS:+7.7265473774262041e+00 +air_temperature | Min:+1.7646396818493386e+02 Max:+3.1441817730817547e+02 RMS:+2.4989439192167012e+02 +layer_thickness | Min:-2.8728340670782936e+03 Max:-1.5844010522128334e+01 RMS:+9.8310890327340439e+02 +air_pressure_thickness | Min:+6.0599999953541761e-01 Max:+1.7747677648323079e+03 RMS:+9.5676512006998769e+02 +surface_pressure | Min:+5.3258706655314105e+04 Max:+1.0392278267916713e+05 RMS:+9.8892468668120782e+04 +specific_humidity | Min:+9.5164908108891825e-09 Max:+2.0222136340880194e-02 RMS:+4.8552459174929334e-03 +cloud_liquid_ice | Min:-1.3545705512023003e-20 Max:+6.7304686933261399e-04 RMS:+1.0726563909939355e-05 +cloud_liquid_water | Min:-5.4244251425755909e-20 Max:+1.2879383569881558e-03 RMS:+3.8382491358281577e-05 +ozone_mass_mixing_ratio | Min:+2.9375003505643131e-08 Max:+1.8014885502109894e-05 RMS:+4.4946424525503736e-06 +sfc_geopotential_height_times_grav | Min:-2.6495993813133236e+02 Max:+5.1678555530273436e+04 RMS:+6.1813840931281156e+03 +slmsk | Min:+0.0000000000000000e+00 Max:+2.0000000000000000e+00 RMS:+6.5853328538727862e-01 +sheleg | Min:+0.0000000000000000e+00 Max:+3.3367721414977348e+02 RMS:+2.2844482914802999e+01 +sea_surface_temperature | Min:+2.0791388472150163e+02 Max:+3.4773327272108133e+02 RMS:+2.8888781974396971e+02 +vtype | Min:+0.0000000000000000e+00 Max:+1.9000000000000000e+01 RMS:+5.6602100164707281e+00 +stype | Min:+0.0000000000000000e+00 Max:+1.6000000000000000e+01 RMS:+4.1039110890529598e+00 +vfrac | Min:+0.0000000000000000e+00 Max:+9.7215068788150627e-01 RMS:+1.9167119887330206e-01 +stc | Min:+2.2044502210464319e+02 Max:+3.1531527244154063e+02 RMS:+2.8916372621979713e+02 +soilMoistureVolumetric | Min:+2.0000000000000000e-02 Max:+1.0000000000000000e+00 RMS:+8.7096638468255239e-01 +totalSnowDepth | Min:+0.0000000000000000e+00 Max:+1.8134236127502425e+03 RMS:+1.8222715253202779e+02 +surface_eastward_wind | Min:-2.0302650327509099e+01 Max:+2.1760551255897649e+01 RMS:+5.2972810149144989e+00 +surface_northward_wind | Min:-1.4968894650435200e+01 Max:+1.6765905429678224e+01 RMS:+4.2782356748931996e+00 +f10m | 
Min:+9.3257572069704375e-01 Max:+1.0782447572529907e+00 RMS:+9.9542831934707332e-01 +---------------------------------------------------------------------------------------------------- +Initial state for member 2: +---------------------------------------------------------------------------------------------------- +State print | number of fields = 23 | cube sphere face size: C48 +eastward_wind | Min:-5.2853490758012072e+01 Max:+8.7248708527964936e+01 RMS:+1.5955809745480765e+01 +northward_wind | Min:-7.2178651628585953e+01 Max:+7.2568167230757609e+01 RMS:+7.7335061167195889e+00 +air_temperature | Min:+1.7660670217895057e+02 Max:+3.1526174222386965e+02 RMS:+2.4989392943476341e+02 +layer_thickness | Min:-2.8750852837791022e+03 Max:-1.5794597440684603e+01 RMS:+9.8301208643901532e+02 +air_pressure_thickness | Min:+6.0599999948625027e-01 Max:+1.7752652524725063e+03 RMS:+9.5675980922035808e+02 +surface_pressure | Min:+5.3316703550070277e+04 Max:+1.0394529585957996e+05 RMS:+9.8891952825814413e+04 +specific_humidity | Min:+2.1514597275760548e-08 Max:+1.9663168048133908e-02 RMS:+4.8569105861498299e-03 +cloud_liquid_ice | Min:-1.3544569699620215e-20 Max:+7.9557019872326799e-04 RMS:+1.0816147029169270e-05 +cloud_liquid_water | Min:-5.4200711162051571e-20 Max:+1.2298889446687945e-03 RMS:+3.8689917911663205e-05 +ozone_mass_mixing_ratio | Min:+1.6238717942558199e-08 Max:+1.8031521448783218e-05 RMS:+4.4947640660308833e-06 +sfc_geopotential_height_times_grav | Min:-2.6495993813133236e+02 Max:+5.1678555530273436e+04 RMS:+6.1813840931281156e+03 +slmsk | Min:+0.0000000000000000e+00 Max:+2.0000000000000000e+00 RMS:+6.5853328538727862e-01 +sheleg | Min:+0.0000000000000000e+00 Max:+3.3446253401369569e+02 RMS:+2.2874187237166272e+01 +sea_surface_temperature | Min:+2.0978258608237891e+02 Max:+3.4063292812032523e+02 RMS:+2.8892010225961866e+02 +vtype | Min:+0.0000000000000000e+00 Max:+1.9000000000000000e+01 RMS:+5.6602100164707281e+00 +stype | Min:+0.0000000000000000e+00 Max:+1.6000000000000000e+01 RMS:+4.1039110890529598e+00 +vfrac | Min:+0.0000000000000000e+00 Max:+9.7215068788150627e-01 RMS:+1.9167119887330206e-01 +stc | Min:+2.2042860927849489e+02 Max:+3.1556639379821877e+02 RMS:+2.8916436305504556e+02 +soilMoistureVolumetric | Min:+2.0000000000000000e-02 Max:+1.0000000000000000e+00 RMS:+8.7096997815200794e-01 +totalSnowDepth | Min:+0.0000000000000000e+00 Max:+1.8153278546401407e+03 RMS:+1.8214466028069003e+02 +surface_eastward_wind | Min:-1.9122075401123020e+01 Max:+2.1444683230863411e+01 RMS:+5.3022867151420767e+00 +surface_northward_wind | Min:-1.4756460531574159e+01 Max:+1.8507846311761583e+01 RMS:+4.2802135707024282e+00 +f10m | Min:+9.3238101450200428e-01 Max:+1.0793518217027296e+00 RMS:+9.9539536810295792e-01 +---------------------------------------------------------------------------------------------------- +Initial state for member 3: +---------------------------------------------------------------------------------------------------- +State print | number of fields = 23 | cube sphere face size: C48 +eastward_wind | Min:-5.3776366655868109e+01 Max:+8.6331467223030344e+01 RMS:+1.5960680883840784e+01 +northward_wind | Min:-7.0176400159320124e+01 Max:+6.9531049915936578e+01 RMS:+7.7410680097841684e+00 +air_temperature | Min:+1.7648447482434227e+02 Max:+3.1491330999751426e+02 RMS:+2.4989313307279673e+02 +layer_thickness | Min:-2.8748181022793874e+03 Max:-1.5822712660806282e+01 RMS:+9.8301820591439434e+02 +air_pressure_thickness | Min:+6.0599999955059858e-01 Max:+1.7742011426782731e+03 
RMS:+9.5675144888218676e+02 +surface_pressure | Min:+5.3349125363319996e+04 Max:+1.0389718147809265e+05 RMS:+9.8891182576541236e+04 +specific_humidity | Min:+2.1846447498856659e-08 Max:+2.0007929878326374e-02 RMS:+4.8392192734415156e-03 +cloud_liquid_ice | Min:-6.7740196346531239e-21 Max:+6.2473018245821010e-04 RMS:+1.0889666282638220e-05 +cloud_liquid_water | Min:-5.4107584399974539e-20 Max:+1.2132298501853738e-03 RMS:+3.8405804865664137e-05 +ozone_mass_mixing_ratio | Min:+1.2289325508931378e-08 Max:+1.8036413152177077e-05 RMS:+4.4950884931704925e-06 +sfc_geopotential_height_times_grav | Min:-2.6495993813133236e+02 Max:+5.1678555530273436e+04 RMS:+6.1813840931281156e+03 +slmsk | Min:+0.0000000000000000e+00 Max:+2.0000000000000000e+00 RMS:+6.5853328538727862e-01 +sheleg | Min:+0.0000000000000000e+00 Max:+3.0768588203961116e+02 RMS:+2.2922702270144551e+01 +sea_surface_temperature | Min:+2.0916406214271251e+02 Max:+3.4510570590652480e+02 RMS:+2.8892699554065905e+02 +vtype | Min:+0.0000000000000000e+00 Max:+1.9000000000000000e+01 RMS:+5.6602100164707281e+00 +stype | Min:+0.0000000000000000e+00 Max:+1.6000000000000000e+01 RMS:+4.1039110890529598e+00 +vfrac | Min:+0.0000000000000000e+00 Max:+9.7215068788150627e-01 RMS:+1.9167119887330206e-01 +stc | Min:+2.2045545733453196e+02 Max:+3.1492502526099332e+02 RMS:+2.8916655770658571e+02 +soilMoistureVolumetric | Min:+2.0000000000000000e-02 Max:+1.0000000000000000e+00 RMS:+8.7097084862976482e-01 +totalSnowDepth | Min:+0.0000000000000000e+00 Max:+1.8143247567290359e+03 RMS:+1.8207043992662719e+02 +surface_eastward_wind | Min:-1.8951406320589882e+01 Max:+2.1985111394345580e+01 RMS:+5.3062512116922882e+00 +surface_northward_wind | Min:-1.4183550286832059e+01 Max:+1.7289090100273199e+01 RMS:+4.2365567732484486e+00 +f10m | Min:+9.2995810057091766e-01 Max:+1.0787102983488339e+00 RMS:+9.9539268028154582e-01 +---------------------------------------------------------------------------------------------------- +H(x) for member 1: +AMSUA N19 nobs= 83277 Min=201.867856449266, Max=282.0067070796676, RMS=233.8395702227402 + +sondes nobs= 8481 Min=-33.72854932079181, Max=309.9716288420037, RMS=141.2118598915683 + + +H(x) for member 2: +AMSUA N19 nobs= 83277 Min=201.7137808797608, Max=281.5706320956667, RMS=233.8396302310865 + +sondes nobs= 8481 Min=-30.53139157018941, Max=310.1652090473367, RMS=141.1987914521134 + + +H(x) for member 3: +AMSUA N19 nobs= 83277 Min=201.6466089031518, Max=281.7411453383614, RMS=233.841237886985 + +sondes nobs= 8481 Min=-32.8575760671954, Max=310.4005466290033, RMS=141.1778598834968 + + +H(x) ensemble background mean: +AMSUA N19 nobs= 83277 Min=201.767617763992, Max=281.764603524317, RMS=233.8401187797573 + +sondes nobs= 8481 Min=-32.28649706124742, Max=310.1791281727812, RMS=141.1945767641295 + + +background y - H(x): +AMSUA N19 nobs= 83254 Min=-39.30084110186959, Max=14.76474343816579, RMS=1.484386599967397 + +sondes nobs= 3950 Min=-16.7986628776744, Max=16.07421631717779, RMS=4.060196677972983 + + +Background mean : +---------------------------------------------------------------------------------------------------- +State print | number of fields = 23 | cube sphere face size: C48 +eastward_wind | Min:-5.0268509151643670e+01 Max:+8.6686198337381171e+01 RMS:+1.5943048359570170e+01 +northward_wind | Min:-7.1281090590794832e+01 Max:+7.0613804297416578e+01 RMS:+7.6835678597476624e+00 +air_temperature | Min:+1.7652661362892786e+02 Max:+3.1486440984318642e+02 RMS:+2.4989353796113025e+02 +layer_thickness | Min:-2.8742236189948235e+03 
Max:-1.5820440207873071e+01 RMS:+9.8304588977244248e+02 +air_pressure_thickness | Min:+6.0599999954215089e-01 Max:+1.7745271604965960e+03 RMS:+9.5675867998450212e+02 +surface_pressure | Min:+5.3308178522901449e+04 Max:+1.0391190836184639e+05 RMS:+9.8891859477911828e+04 +specific_humidity | Min:+8.4477932715987172e-08 Max:+1.9531761470252519e-02 RMS:+4.8455165312756591e-03 +cloud_liquid_ice | Min:-4.5148565665400716e-21 Max:+4.1857236992490481e-04 RMS:+1.0081670582489738e-05 +cloud_liquid_water | Min:-1.8646059632736959e-20 Max:+1.0447516732817988e-03 RMS:+3.3566571025591825e-05 +ozone_mass_mixing_ratio | Min:+2.8043654425670649e-08 Max:+1.7892907435183836e-05 RMS:+4.4944974047442364e-06 +sfc_geopotential_height_times_grav | Min:-2.6495993813133236e+02 Max:+5.1678555530273436e+04 RMS:+6.1813840931281156e+03 +slmsk | Min:+0.0000000000000000e+00 Max:+2.0000000000000000e+00 RMS:+6.5853328538727862e-01 +sheleg | Min:+0.0000000000000000e+00 Max:+3.2527521006769342e+02 RMS:+2.2874231579198337e+01 +sea_surface_temperature | Min:+2.0909325814222842e+02 Max:+3.4400445627039335e+02 RMS:+2.8891122950497800e+02 +vtype | Min:+0.0000000000000000e+00 Max:+1.9000000000000000e+01 RMS:+5.6602100164707281e+00 +stype | Min:+0.0000000000000000e+00 Max:+1.6000000000000000e+01 RMS:+4.1039110890529598e+00 +vfrac | Min:+0.0000000000000000e+00 Max:+9.7215068788150627e-01 RMS:+1.9167119887330206e-01 +stc | Min:+2.2044302957255664e+02 Max:+3.1448974639746052e+02 RMS:+2.8916485102080441e+02 +soilMoistureVolumetric | Min:+1.9999999999999997e-02 Max:+1.0000000000000000e+00 RMS:+8.7096039254926949e-01 +totalSnowDepth | Min:+0.0000000000000000e+00 Max:+1.8143587413731393e+03 RMS:+1.8203629732600007e+02 +surface_eastward_wind | Min:-1.9400969882086276e+01 Max:+2.1628152789536944e+01 RMS:+5.2804512030599353e+00 +surface_northward_wind | Min:-1.4483927323189917e+01 Max:+1.6910069485657925e+01 RMS:+4.2391121142819319e+00 +f10m | Min:+9.3258902621206641e-01 Max:+1.0787625575968645e+00 RMS:+9.9540515270826369e-01 +---------------------------------------------------------------------------------------------------- +Analysis mean : +---------------------------------------------------------------------------------------------------- +State print | number of fields = 23 | cube sphere face size: C48 +eastward_wind | Min:-5.0291091239258996e+01 Max:+8.8974835429946992e+01 RMS:+1.5969146223857392e+01 +northward_wind | Min:-7.1207756314596480e+01 Max:+7.7474649441430500e+01 RMS:+7.7121664031692445e+00 +air_temperature | Min:+1.7652661362975533e+02 Max:+3.1526920633902677e+02 RMS:+2.4991104122253967e+02 +layer_thickness | Min:-2.8742236223448945e+03 Max:-1.5811043880008970e+01 RMS:+9.8305911850886582e+02 +air_pressure_thickness | Min:+6.0599999954215089e-01 Max:+1.7747701325883327e+03 RMS:+9.5671866301370255e+02 +surface_pressure | Min:+5.3308178524829898e+04 Max:+1.0391190830956653e+05 RMS:+9.8891859479665873e+04 +specific_humidity | Min:+0.0000000000000000e+00 Max:+1.9531761470252519e-02 RMS:+4.8542934028708851e-03 +cloud_liquid_ice | Min:+0.0000000000000000e+00 Max:+3.9005078876250314e-04 RMS:+1.0464822449614852e-05 +cloud_liquid_water | Min:+0.0000000000000000e+00 Max:+1.8113630797591289e-03 RMS:+3.5137404071414924e-05 +ozone_mass_mixing_ratio | Min:+0.0000000000000000e+00 Max:+1.7891446519775333e-05 RMS:+4.4959623220880551e-06 +sfc_geopotential_height_times_grav | Min:-2.6495993813133236e+02 Max:+5.1678555530273436e+04 RMS:+6.1813840931281156e+03 +slmsk | Min:+0.0000000000000000e+00 Max:+2.0000000000000000e+00 
RMS:+6.5853328538727862e-01 +sheleg | Min:+0.0000000000000000e+00 Max:+3.2527521006769342e+02 RMS:+2.2874231579198337e+01 +sea_surface_temperature | Min:+2.0909325814222842e+02 Max:+3.4400445627039335e+02 RMS:+2.8891122950497800e+02 +vtype | Min:+0.0000000000000000e+00 Max:+1.9000000000000000e+01 RMS:+5.6602100164707281e+00 +stype | Min:+0.0000000000000000e+00 Max:+1.6000000000000000e+01 RMS:+4.1039110890529598e+00 +vfrac | Min:+0.0000000000000000e+00 Max:+9.7215068788150627e-01 RMS:+1.9167119887330206e-01 +stc | Min:+2.2044302957255664e+02 Max:+3.1448974639746052e+02 RMS:+2.8916485102080441e+02 +soilMoistureVolumetric | Min:+1.9999999999999997e-02 Max:+1.0000000000000000e+00 RMS:+8.7096039254926949e-01 +totalSnowDepth | Min:+0.0000000000000000e+00 Max:+1.8143587413731393e+03 RMS:+1.8203629732600007e+02 +surface_eastward_wind | Min:-1.9400969882086276e+01 Max:+2.1628152789536944e+01 RMS:+5.2804512030599353e+00 +surface_northward_wind | Min:-1.4483927323189917e+01 Max:+1.6910069485657925e+01 RMS:+4.2391121142819319e+00 +f10m | Min:+9.3258902621206641e-01 Max:+1.0787625575968645e+00 RMS:+9.9540515270826369e-01 +---------------------------------------------------------------------------------------------------- +H(x) for member 1: +AMSUA N19 nobs= 83277 Min=201.948249566513, Max=281.9939007255653, RMS=233.8812606058921 + +sondes nobs= 8481 Min=-32.57419789664539, Max=310.0585910080372, RMS=141.1835372274868 + + +H(x) for member 2: +AMSUA N19 nobs= 83277 Min=201.7220008170584, Max=281.5396769750011, RMS=233.8797643864029 + +sondes nobs= 8481 Min=-30.01531933777411, Max=310.1616281283975, RMS=141.1677770998922 + + +H(x) for member 3: +AMSUA N19 nobs= 83277 Min=201.7936859824386, Max=281.798062021774, RMS=233.8820591954841 + +sondes nobs= 8481 Min=-32.21444465700474, Max=310.4926390655562, RMS=141.1518466835251 + + +H(x) ensemble analysis mean: +AMSUA N19 nobs= 83277 Min=201.8698936751222, Max=281.7332912576108, RMS=233.8810051433246 + +sondes nobs= 8481 Min=-31.5986820508983, Max=310.2376194006636, RMS=141.1665087234307 + + +analysis y - H(x): +AMSUA N19 nobs= 83254 Min=-39.2511998193601, Max=14.76187988153276, RMS=1.466781023101904 + +sondes nobs= 3950 Min=-17.36927867050949, Max=15.32167936052966, RMS=3.976041107561534 + + +ombg RMS: 1.688286984844696 +oman RMS: 1.664354500684967 diff --git a/test/gw-ci/CMakeLists.txt b/test/gw-ci/CMakeLists.txt new file mode 100644 index 000000000..dfd2a112f --- /dev/null +++ b/test/gw-ci/CMakeLists.txt @@ -0,0 +1,86 @@ +# Function that generates the 1/2 cycle forecast and DA tasks +function(add_cycling_tests pslot YAML_PATH HOMEgfs RUNTESTS PROJECT_SOURCE_DIR TASK_LIST) + # Prepare the COMROOT and EXPDIR for the cycling ctests + add_test(NAME ${pslot} + COMMAND /bin/bash -c "${PROJECT_SOURCE_DIR}/test/gw-ci/create_exp.sh ${YAML_PATH} ${pslot} ${HOMEgfs} ${RUNTESTS}" + WORKING_DIRECTORY ${RUNTESTS}) + set_tests_properties(${pslot} PROPERTIES LABELS "manual") + + # Get the 1/2 cycle and full cycle's dates + execute_process( + COMMAND ${CMAKE_COMMAND} -E env python ${PROJECT_SOURCE_DIR}/test/gw-ci/get_cycles.py ${YAML_PATH} + OUTPUT_VARIABLE SCRIPT_OUTPUT + RESULT_VARIABLE SCRIPT_RESULT + OUTPUT_STRIP_TRAILING_WHITESPACE + ) + string(REPLACE "," ";" DATES_LIST ${SCRIPT_OUTPUT}) + list(GET DATES_LIST 0 HALF_CYCLE) + list(GET DATES_LIST 1 FULL_CYCLE) + + # 1/2 cycle gdasfcst + message(STATUS "preparing 1/2 cycle gdasfcst for ${pslot} ctest") + add_test(NAME ${pslot}_gdasfcst_${HALF_CYCLE} + COMMAND /bin/bash -c "${PROJECT_SOURCE_DIR}/test/gw-ci/run_exp.sh 
${pslot} gdasfcst ${HALF_CYCLE}" + WORKING_DIRECTORY ${RUNTESTS}) + set_tests_properties(${pslot}_gdasfcst_${HALF_CYCLE} PROPERTIES LABELS "manual") + + # Select the list of tasks to run for the full cycle + message(STATUS "Tasks ${TASK_LIST}") + + foreach(task ${TASK_LIST}) + message(STATUS "preparing the full cycle ${task} for ${pslot} ctest") + add_test(NAME ${pslot}_${task}_${FULL_CYCLE} + COMMAND /bin/bash -c "${PROJECT_SOURCE_DIR}/test/gw-ci/run_exp.sh ${pslot} ${task} ${FULL_CYCLE}" + WORKING_DIRECTORY ${RUNTESTS}) + set_tests_properties(${pslot}_${task}_${FULL_CYCLE} PROPERTIES LABELS "manual") + endforeach() +endfunction() + +# Setup the environement +set(HOMEgfs ${CMAKE_SOURCE_DIR}/../../..) +set(RUNTESTS ${CMAKE_CURRENT_BINARY_DIR}/../../test/gw-ci) + +# WCDA, low-res +# ------------- +set(pslot "WCDA-3DVAR-C48mx500") +set(YAML_PATH ${HOMEgfs}/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml) +set(TASK_LIST + "gdasprepoceanobs" + "gdasocnanalprep" + "gdasocnanalbmat" + "gdasocnanalrun" + "gdasocnanalchkpt" + "gdasocnanalpost" + ) +add_cycling_tests(${pslot} ${YAML_PATH} ${HOMEgfs} ${RUNTESTS} ${PROJECT_SOURCE_DIR} "${TASK_LIST}") + +# Aero-Land DA, C96 +# ----------------- +set(pslot "Aero-Snow-3DVAR-C96") +set(YAML_PATH ${HOMEgfs}/ci/cases/pr/C96_atmaerosnowDA.yaml) +set(TASK_LIST) # empty list for now +add_cycling_tests(${pslot} ${YAML_PATH} ${HOMEgfs} ${RUNTESTS} ${PROJECT_SOURCE_DIR} "${TASK_LIST}") + +# Atm DA, C96/C48 +# --------------- +set(pslot "Atm-hyb-C96C48") +set(YAML_PATH ${HOMEgfs}/ci/cases/pr/C96C48_ufs_hybatmDA.yaml) +set(TASK_LIST) # empty list for now +add_cycling_tests(${pslot} ${YAML_PATH} ${HOMEgfs} ${RUNTESTS} ${PROJECT_SOURCE_DIR} "${TASK_LIST}") + +# GFSv17, 3DVAR prototype +# ----------------------- +set(pslot "GFSv17-3DVAR-C384mx025") +set(YAML_PATH ${HOMEgfs}/ci/cases/gfsv17/C384mx025_3DVarAOWCDA.yaml) +set(TASK_LIST + "gdasprepoceanobs" + "gdasocnanalprep" + "gdasocnanalbmat" + "gdasocnanalrun" + "gdasocnanalchkpt" + "gdasocnanalpost" + "gdasocnanalvrfy" + "gdasprep" + "gdasanal" + ) +add_cycling_tests(${pslot} ${YAML_PATH} ${HOMEgfs} ${RUNTESTS} ${PROJECT_SOURCE_DIR} "${TASK_LIST}") diff --git a/test/gw-ci/create_exp.sh b/test/gw-ci/create_exp.sh new file mode 100755 index 000000000..b4c1535db --- /dev/null +++ b/test/gw-ci/create_exp.sh @@ -0,0 +1,21 @@ +#!/bin/bash +expyaml_ctest="$1" +pslot_ctest="$2" +HOMEgfs="$3" +exp_path=$4 + +# Get ICSDIR_ROOT +source "${HOMEgfs}/ush/detect_machine.sh" +source "${HOMEgfs}/ci/platforms/config.${MACHINE_ID}" + +# Arguments for the exp setup +expyaml=${expyaml_ctest} +export pslot=${pslot_ctest} +export RUNTESTS=${exp_path}/${pslot} +export HPC_ACCOUNT="da-cpu" + +# Source the gw environement +source ${HOMEgfs}/workflow/gw_setup.sh + +# Create the experiment +${HOMEgfs}/workflow/create_experiment.py --yaml ${expyaml} --overwrite diff --git a/test/gw-ci/get_cycles.py b/test/gw-ci/get_cycles.py new file mode 100644 index 000000000..b2eff5d5c --- /dev/null +++ b/test/gw-ci/get_cycles.py @@ -0,0 +1,38 @@ +import re +import argparse +from datetime import datetime, timedelta + + +def read_idate_from_yaml(file_path): + idate_value = None + with open(file_path, 'r') as file: + for line in file: + match = re.search(r'^\s*idate:\s*(.*)', line) + + if match: + idate_value = match.group(1).strip() + break + return idate_value + + +def format_dates(idate_str): + date_obj = datetime.strptime(idate_str, '%Y%m%d%H') + half_cycle = date_obj.strftime('%Y%m%d%H%M') + new_date_obj = date_obj + timedelta(hours=6) + full_cycle = 
new_date_obj.strftime('%Y%m%d%H%M') + + return half_cycle, full_cycle + + +def main(): + parser = argparse.ArgumentParser(description="Extract and format idate.") + parser.add_argument('yaml_file', help="Path to exp.yaml file") + args = parser.parse_args() + + idate_value = read_idate_from_yaml(args.yaml_file) + half_cycle, full_cycle = format_dates(idate_value) + print(f"{half_cycle},{full_cycle}") + + +if __name__ == "__main__": + main() diff --git a/test/gw-ci/run_exp.sh b/test/gw-ci/run_exp.sh new file mode 100755 index 000000000..8040860a0 --- /dev/null +++ b/test/gw-ci/run_exp.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +pslot=$1 +TASK_NAME=$2 +CYCLE=$3 + +# Define the workflow XML and database files +WORKFLOW_XML=${pslot}/EXPDIR/${pslot}/${pslot}.xml +WORKFLOW_DB=${pslot}/EXPDIR/${pslot}/${pslot}.db + +# Boot the task +echo "booting $TASK_NAME for cycle $CYCLE" +if [[ ! -e "$WORKFLOW_DB" ]]; then + rocotorun -w "$WORKFLOW_XML" -d "$WORKFLOW_DB" -t "$TASK_NAME" -c "$CYCLE" +fi +rocotoboot -w "$WORKFLOW_XML" -d "$WORKFLOW_DB" -t "$TASK_NAME" -c "$CYCLE" + +while true; do + # Update the status of the task + rocotorun -w "$WORKFLOW_XML" -d "$WORKFLOW_DB" -t "$TASK_NAME" -c "$CYCLE" + + # Check the task status + OUTPUT=$(rocotostat -w "$WORKFLOW_XML" -d "$WORKFLOW_DB" -t "$TASK_NAME" -c "$CYCLE") + STATUS=$(echo "$OUTPUT" | awk '$2 == task {print $4}' task="$TASK_NAME") + + if [[ "$STATUS" == "SUCCEEDED" ]]; then + echo "The task succeeded." + exit 0 + elif [[ "$STATUS" == "FAILED" ]]; then + echo "The task failed." + exit 1 + elif [[ "$STATUS" == "DEAD" ]]; then + echo "The task is dead." + exit 1 + else + echo "The task is in state: $STATUS" + fi + sleep 10 +done diff --git a/test/soca/gw/CMakeLists.txt b/test/soca/gw/CMakeLists.txt index 38cea0d23..f96a3aec5 100644 --- a/test/soca/gw/CMakeLists.txt +++ b/test/soca/gw/CMakeLists.txt @@ -25,6 +25,10 @@ IF (IS_DIRECTORY /scratch2/NCEPDEV/) set(PARTITION "hera") ENDIF() +IF (IS_DIRECTORY /lfs/h2/) + set(MACHINE "wcoss2") +ENDIF() + # Clean-up add_test(NAME test_gdasapp_soca_run_clean COMMAND ${CMAKE_COMMAND} -E remove_directory ${PROJECT_BINARY_DIR}/test/soca/gw/testrun/testjjobs) @@ -46,11 +50,10 @@ set(jjob_list "JGLOBAL_PREP_OCEAN_OBS" "JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT" "JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN" "JGDAS_GLOBAL_OCEAN_ANALYSIS_ECEN" -# "JGDAS_GLOBAL_OCEAN_ANALYSIS_LETKF" +# "JGDAS_GLOBAL_OCEAN_ANALYSIS_LETKF" "JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT" - "JGDAS_GLOBAL_OCEAN_ANALYSIS_POST") -# TODO(WaterPeople) Add back to the list of tested jobs once fixed -# "JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY") + "JGDAS_GLOBAL_OCEAN_ANALYSIS_POST" + "JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY") set(setup "") foreach(jjob ${jjob_list}) diff --git a/ush/detect_machine.sh b/ush/detect_machine.sh index 8a719c10d..683ee0db7 100755 --- a/ush/detect_machine.sh +++ b/ush/detect_machine.sh @@ -21,10 +21,8 @@ case $(hostname -f) in dlogin0[1-9].dogwood.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### dogwood01-9 dlogin10.dogwood.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### dogwood10 - gaea9) MACHINE_ID=gaea ;; ### gaea9 - gaea1[0-6]) MACHINE_ID=gaea ;; ### gaea10-16 - gaea9.ncrc.gov) MACHINE_ID=gaea ;; ### gaea9 - gaea1[0-6].ncrc.gov) MACHINE_ID=gaea ;; ### gaea10-16 + gaea5[1-8]) MACHINE_ID=gaea ;; ### gaea51-58 + gaea5[1-8].ncrc.gov) MACHINE_ID=gaea ;; ### gaea51-58 hfe0[1-9]) MACHINE_ID=hera ;; ### hera01-09 hfe1[0-2]) MACHINE_ID=hera ;; ### hera10-12 diff --git a/ush/ioda/bufr2ioda/bufr2ioda_adpupa.py b/ush/ioda/bufr2ioda/bufr2ioda_adpupa.py new file mode 100755 index 
000000000..53dce26ad --- /dev/null +++ b/ush/ioda/bufr2ioda/bufr2ioda_adpupa.py @@ -0,0 +1,450 @@ +#!/usr/bin/env python3 +# (C) Copyright 2024 NOAA/NWS/NCEP/EM +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. + +import sys +import os +import argparse +import json +import numpy as np +import numpy.ma as ma +import calendar +import time +import math +from datetime import datetime +from pyiodaconv import bufr +from collections import namedtuple +from pyioda import ioda_obs_space as ioda_ospace +from wxflow import Logger +import copy +import warnings +# suppress warnings +warnings.filterwarnings('ignore') + + +def Compute_WindComponents_from_WindDirection_and_WindSpeed(wdir, wspd): + + uob = (-wspd * np.sin(np.radians(wdir))).astype(np.float32) + vob = (-wspd * np.cos(np.radians(wdir))).astype(np.float32) + + uob = ma.array(uob) + uob = ma.masked_values(uob, uob.fill_value) + vob = ma.array(vob) + vob = ma.masked_values(vob, vob.fill_value) + + return uob, vob + + +def bufr_to_ioda(config, logger): + + subsets = config["subsets"] + logger.debug(f"Checking subsets = {subsets}") + + # Get parameters from configuration + data_format = config["data_format"] + data_type = config["data_type"] + data_description = config["data_description"] + data_provider = config["data_provider"] + cycle_type = config["cycle_type"] + cycle_datetime = config["cycle_datetime"] + dump_dir = config["dump_directory"] + ioda_dir = config["ioda_directory"] + cycle = config["cycle_datetime"] + source = config["source"] + + # Get derived parameters + yyyymmdd = cycle[0:8] + hh = cycle[8:10] + + # General informaton + converter = 'BUFR to IODA Converter' + platform_description = 'ADPUPA BUFR Dump' + + bufrfile = f"{cycle_type}.t{hh}z.{data_type}.tm{hh}.{data_format}" + DATA_PATH = os.path.join(dump_dir, f"{cycle_type}.{yyyymmdd}", str(hh), f"atmos", bufrfile) + if not os.path.isfile(DATA_PATH): + logger.info(f"DATA_PATH {DATA_PATH} does not exist") + return + logger.debug(f"The DATA_PATH is: {DATA_PATH}") + + # ============================================ + # Make the QuerySet for all the data we want + # ============================================ + start_time = time.time() + + logger.info('Making QuerySet') + q = bufr.QuerySet() + + # MetaData + q.add('latitude', '*/CLAT') + q.add('longitude', '*/CLON') + q.add('stationIdentification', '*/RPID') + q.add('stationElevation', '*/SELV') + q.add('pressure', '*/UARLV/PRLC') + # MetaData/Observation Time + q.add('year', '*/YEAR') + q.add('month', '*/MNTH') + q.add('day', '*/DAYS') + q.add('hour', '*/HOUR') + # MetaData/Receipt Time + q.add('receiptTimeSignificance', '*/RCPTIM{1}/RCTS') + q.add('receiptYear', '*/RCPTIM{1}/RCYR') + q.add('receiptMonth', '*/RCPTIM{1}/RCMO') + q.add('receiptDay', '*/RCPTIM{1}/RCDY') + q.add('receiptHour', '*/RCPTIM{1}/RCHR') + q.add('receiptMinute', '*/RCPTIM{1}/RCMI') + # MetaData/Launch Time + q.add('launchHour', '*/UASDG/UALNHR') + q.add('launchMinute', '*/UASDG/UALNMN') + + # ObsValue + q.add('airTemperature', '*/UARLV/UATMP/TMDB') + q.add('dewpointTemperature', '*/UARLV/UATMP/TMDP') + q.add('windDirection', '*/UARLV/UAWND/WDIR') + q.add('windSpeed', '*/UARLV/UAWND/WSPD') + + # QualityMark + q.add('pressureQM', '*/UARLV/QMPR') + q.add('airTemperatureQM', '*/UARLV/UATMP/QMAT') + q.add('dewpointTemperatureQM', '*/UARLV/UATMP/QMDD') + q.add('windSpeedQM', '*/UARLV/UAWND/QMWN') + + end_time = time.time() + running_time = end_time - start_time + 
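The sign convention in Compute_WindComponents_from_WindDirection_and_WindSpeed, defined near the top of this converter, is the usual meteorological one: wind direction is the direction the wind blows from, so u = -wspd*sin(wdir) and v = -wspd*cos(wdir). A stand-alone sanity check of that convention, using hypothetical directions and speeds rather than values from any BUFR dump:

import numpy as np

# Hypothetical winds, all 10 m/s: westerly (270 deg), northerly (0 deg),
# easterly (90 deg).
wdir = np.array([270.0, 0.0, 90.0])
wspd = np.array([10.0, 10.0, 10.0])

uob = (-wspd * np.sin(np.radians(wdir))).astype(np.float32)  # eastward component
vob = (-wspd * np.cos(np.radians(wdir))).astype(np.float32)  # northward component
# Expected, to rounding: u ~ [+10, 0, -10], v ~ [0, -10, 0]
print(np.round(uob, 2), np.round(vob, 2))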
logger.debug(f'Running time for making QuerySet : {running_time} seconds') + + # ============================================================== + # Open the BUFR file and execute the QuerySet to get ResultSet + # Use the ResultSet returned to get numpy arrays of the data + # ============================================================== + start_time = time.time() + + logger.info(f"Executing QuerySet for ADPUPA BUFR DUMP to get ResultSet ...") + with bufr.File(DATA_PATH) as f: + try: + r = f.execute(q) + except Exception as err: + logger.info(f'Return with {err}') + return + + # MetaData + logger.debug(f" ... Executing QuerySet: get MetaData ...") + clat = r.get('latitude', group_by='pressure') + clon = r.get('longitude', group_by='pressure') + rpid = r.get('stationIdentification', group_by='pressure') + selv = r.get('stationElevation', group_by='pressure', type='float') + prlc = r.get('pressure', group_by='pressure', type='float') + + # MetaData/Observation Time + year = r.get('year', group_by='pressure') + month = r.get('month', group_by='pressure') + day = r.get('day', group_by='pressure') + hour = r.get('hour', group_by='pressure') + # DateTime: seconds since Epoch time + # IODA has no support for numpy datetime arrays dtype=datetime64[s] + timestamp = r.get_datetime('year', 'month', 'day', 'hour', group_by='pressure').astype(np.int64) + int64_fill_value = np.int64(0) + timestamp = ma.array(timestamp) + timestamp = ma.masked_values(timestamp, int64_fill_value) + + # MetaData/Receipt Time + rcts = r.get('receiptTimeSignificance', group_by='pressure') + receiptYear = r.get('receiptYear', group_by='pressure') + receiptMonth = r.get('receiptMonth', group_by='pressure') + receiptDay = r.get('receiptDay', group_by='pressure') + receiptHour = r.get('receiptHour', group_by='pressure') + receiptMinute = r.get('receiptMinute', group_by='pressure') + logger.debug(f" ... Executing QuerySet: get datatime: receipt time ...") + receipttime1 = r.get_datetime('receiptYear', 'receiptMonth', 'receiptDay', 'receiptHour', 'receiptMinute', group_by='pressure').astype(np.int64) + # Extract receipttime from receipttime1, which belongs to RCTS=0, i.e. General decoder receipt time + receipttime = receipttime1 + receipttime = np.where(rcts == 0, receipttime1, receipttime) + receipttime = ma.array(receipttime) + receipttime = ma.masked_values(receipttime, int64_fill_value) + + # MetaData/Launch Time + launchHour = r.get('launchHour', group_by='pressure') + launchMinute = r.get('launchMinute', group_by='pressure') + logger.debug(f" ... 
Executing QuerySet: get datatime: launch time ...") + launchtime = r.get_datetime('year', 'month', 'day', 'launchHour', 'launchMinute', group_by='pressure').astype(np.int64) + launchtime = ma.array(launchtime) + launchtime = ma.masked_values(launchtime, int64_fill_value) + + # ObsValue + tmdb = r.get('airTemperature', group_by='pressure') + tmdp = r.get('dewpointTemperature', group_by='pressure') + wdir = r.get('windDirection', group_by='pressure') + wspd = r.get('windSpeed', group_by='pressure') + + # QualityMark + qmpr = r.get('pressureQM', group_by='pressure') + qmat = r.get('airTemperatureQM', group_by='pressure') + qmdd = r.get('dewpointTemperatureQM', group_by='pressure') + qmwn = r.get('windSpeedQM', group_by='pressure') + + # ObsError + logger.debug(f"Generating ObsError array with missing value...") + pressureOE = np.float32(np.ma.masked_array(np.full((len(prlc)), 0.0))) + airTemperaturOE = np.float32(np.ma.masked_array(np.full((len(tmdb)), 0.0))) + dewpointTemperatureOE = np.float32(np.ma.masked_array(np.full((len(tmdp)), 0.0))) + windEastwardOE = np.float32(np.ma.masked_array(np.full((len(wspd)), 0.0))) + windNorthwardOE = np.float32(np.ma.masked_array(np.full((len(wspd)), 0.0))) + + end_time = time.time() + running_time = end_time - start_time + logger.info(f'Processing time for executing QuerySet to get ResultSet : {running_time} seconds') + + # ========================= + # Create derived variables + # ========================= + start_time = time.time() + + logger.info('Creating derived variables') + logger.debug('Creating derived variables - wind components (uob and vob)') + + uob, vob = Compute_WindComponents_from_WindDirection_and_WindSpeed(wdir, wspd) + logger.debug(f' uob min/max = {uob.min()} {uob.max()}') + logger.debug(f' vob min/max = {vob.min()} {vob.max()}') + + end_time = time.time() + running_time = end_time - start_time + logger.info(f'Processing time for creating derived variables : {running_time} seconds') + + logger.debug('Executing QuerySet for ADPUPA: Check BUFR variable generic dimension and type') + # Check BUFR variable generic dimension and type + logger.debug(f' clat shape = {clat.shape}') + logger.debug(f' clon shape = {clon.shape}') + logger.debug(f' rpid shape = {rpid.shape}') + logger.debug(f' selv shape = {selv.shape}') + logger.debug(f' prlc shape = {prlc.shape}') + + logger.debug(f' rcts shape = {rcts.shape}') + logger.debug(f' rcyr shape = {receiptYear.shape}') + logger.debug(f' rcmo shape = {receiptMonth.shape}') + logger.debug(f' rcdy shape = {receiptDay.shape}') + logger.debug(f' rchr shape = {receiptHour.shape}') + logger.debug(f' rcmi shape = {receiptMinute.shape}') + + logger.debug(f' ualnhr shape = {launchHour.shape}') + logger.debug(f' ualnmn shape = {launchMinute.shape}') + + logger.debug(f' tmdb shape = {tmdb.shape}') + logger.debug(f' tmdp shape = {tmdp.shape}') + logger.debug(f' wdir shape = {wdir.shape}') + logger.debug(f' wspd shape = {wspd.shape}') + logger.debug(f' uob shape = {uob.shape}') + logger.debug(f' vob shape = {vob.shape}') + + logger.debug(f' qmpr shape = {qmpr.shape}') + logger.debug(f' qmat shape = {qmat.shape}') + logger.debug(f' qmdd shape = {qmdd.shape}') + logger.debug(f' qmwn shape = {qmwn.shape}') + + logger.debug(f' clat type = {clat.dtype}') + logger.debug(f' clon type = {clon.dtype}') + logger.debug(f' rpid type = {rpid.dtype}') + logger.debug(f' selv type = {selv.dtype}') + logger.debug(f' prlc type = {prlc.dtype}') + + logger.debug(f' rcts type = {rcts.dtype}') + logger.debug(f' rcyr type = 
{receiptYear.dtype}') + logger.debug(f' rcmo type = {receiptMonth.dtype}') + logger.debug(f' rcdy type = {receiptDay.dtype}') + logger.debug(f' rchr type = {receiptHour.dtype}') + logger.debug(f' rcmi type = {receiptMinute.dtype}') + + logger.debug(f' ualnhr type = {launchHour.dtype}') + logger.debug(f' ualnmn type = {launchMinute.dtype}') + + logger.debug(f' tmdb type = {tmdb.dtype}') + logger.debug(f' tmdp type = {tmdp.dtype}') + logger.debug(f' wdir type = {wdir.dtype}') + logger.debug(f' wspd type = {wspd.dtype}') + logger.debug(f' uob type = {uob.dtype}') + logger.debug(f' vob type = {vob.dtype}') + + logger.debug(f' qmpr type = {qmpr.dtype}') + logger.debug(f' qmat type = {qmat.dtype}') + logger.debug(f' qmdd type = {qmdd.dtype}') + logger.debug(f' qmwn type = {qmwn.dtype}') + + # ===================================== + # Create IODA ObsSpace + # Write IODA output + # ===================================== + + # Create the dimensions + dims = {'Location': np.arange(0, clat.shape[0])} + + # Create IODA ObsSpace + iodafile = f"{cycle_type}.t{hh}z.{data_type}.nc" + OUTPUT_PATH = os.path.join(ioda_dir, iodafile) + logger.info(f"Create output file: {OUTPUT_PATH}") + obsspace = ioda_ospace.ObsSpace(OUTPUT_PATH, mode='w', dim_dict=dims) + + # Create Global attributes + logger.debug(' ... ... Create global attributes') + obsspace.write_attr('sourceFiles', bufrfile) + obsspace.write_attr('description', data_description) + + # Create IODA variables + logger.debug(f" ... ... Create variables: name, type, units, and attributes") + + # Datetime + obsspace.create_var('MetaData/dateTime', dtype=timestamp.dtype, fillval=timestamp.fill_value) \ + .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \ + .write_attr('long_name', 'Datetime') \ + .write_data(timestamp) + + # Latitude + obsspace.create_var('MetaData/latitude', dtype=clat.dtype, fillval=clat.fill_value) \ + .write_attr('units', 'degrees_north') \ + .write_attr('valid_range', np.array([-90, 90], dtype=np.float32)) \ + .write_attr('long_name', 'Latitude') \ + .write_data(clat) + + # Longitude + obsspace.create_var('MetaData/longitude', dtype=clon.dtype, fillval=clon.fill_value) \ + .write_attr('units', 'degrees_east') \ + .write_attr('valid_range', np.array([-180, 180], dtype=np.float32)) \ + .write_attr('long_name', 'Longitude') \ + .write_data(clon) + + # Station Identification + obsspace.create_var('MetaData/stationIdentification', dtype=rpid.dtype, fillval=rpid.fill_value) \ + .write_attr('long_name', 'Station Identification') \ + .write_data(rpid) + + # Station Elevation + obsspace.create_var('MetaData/stationElevation', dtype=selv.dtype, fillval=selv.fill_value) \ + .write_attr('units', 'm') \ + .write_attr('long_name', 'Station Elevation') \ + .write_data(selv) + + # Pressure + obsspace.create_var('MetaData/pressure', dtype=prlc.dtype, fillval=prlc.fill_value) \ + .write_attr('units', 'Pa') \ + .write_attr('long_name', 'Pressure') \ + .write_data(prlc) + + # ReceiptTime + obsspace.create_var('MetaData/receiptTime', dtype=timestamp.dtype, fillval=timestamp.fill_value) \ + .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \ + .write_attr('long_name', 'Receipt Time') \ + .write_data(receipttime) + + # LaunchTime + obsspace.create_var('MetaData/launchTime', dtype=timestamp.dtype, fillval=timestamp.fill_value) \ + .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \ + .write_attr('long_name', 'Launch Time') \ + .write_data(launchtime) + + # Air Temperature + obsspace.create_var('ObsValue/airTemperature', 
dtype=tmdb.dtype, fillval=tmdb.fill_value) \ + .write_attr('units', 'K') \ + .write_attr('long_name', 'Air Temperature') \ + .write_data(tmdb) + + # DewPoint Temperature + obsspace.create_var('ObsValue/dewPointTemperature', dtype=tmdp.dtype, fillval=tmdp.fill_value) \ + .write_attr('units', 'K') \ + .write_attr('long_name', 'DewPoint Temperature') \ + .write_data(tmdp) + + # Eastward Wind + obsspace.create_var('ObsValue/windEastward', dtype=uob.dtype, fillval=uob.fill_value) \ + .write_attr('units', 'm s-1') \ + .write_attr('long_name', 'Eastward Wind') \ + .write_data(uob) + + # Northward Wind + obsspace.create_var('ObsValue/windNorthward', dtype=vob.dtype, fillval=vob.fill_value) \ + .write_attr('units', 'm s-1') \ + .write_attr('long_name', 'Northward Wind') \ + .write_data(vob) + + # Pressure Quality Marker + obsspace.create_var('QualityMarker/pressure', dtype=qmpr.dtype, fillval=qmpr.fill_value) \ + .write_attr('long_name', 'Pressure Quality Marker') \ + .write_data(qmpr) + + # Air Temperature Quality Marker + obsspace.create_var('QualityMarker/airTemperature', dtype=qmat.dtype, fillval=qmat.fill_value) \ + .write_attr('long_name', 'Temperature Quality Marker') \ + .write_data(qmat) + + # DewPoint Temperature Quality Marker + obsspace.create_var('QualityMarker/dewPointTemperature', dtype=qmdd.dtype, fillval=qmdd.fill_value) \ + .write_attr('long_name', 'DewPoint Temperature Quality Marker') \ + .write_data(qmdd) + + # Eastward Wind Quality Marker + obsspace.create_var('QualityMarker/windEastward', dtype=qmwn.dtype, fillval=qmwn.fill_value) \ + .write_attr('long_name', 'Eastward Wind Quality Marker') \ + .write_data(qmwn) + + # Northward Wind Quality Marker + obsspace.create_var('QualityMarker/windNorthward', dtype=qmwn.dtype, fillval=qmwn.fill_value) \ + .write_attr('long_name', 'Northward Wind Quality Marker') \ + .write_data(qmwn) + + # Pressure Observation Error + obsspace.create_var('ObsError/pressure', dtype=prlc.dtype, fillval=prlc.fill_value) \ + .write_attr('units', 'Pa') \ + .write_attr('long_name', 'Pressure Observation Error') \ + .write_data(pressureOE) + + # Air Temperature Observation Error + obsspace.create_var('ObsError/airTemperature', dtype=tmdb.dtype, fillval=tmdb.fill_value) \ + .write_attr('units', 'K') \ + .write_attr('long_name', 'Air Temperature Observation Error') \ + .write_data(airTemperaturOE) + + # DewPoint Temperature Observation Error + obsspace.create_var('ObsError/dewPointTemperature', dtype=tmdp.dtype, fillval=tmdp.fill_value) \ + .write_attr('units', 'K') \ + .write_attr('long_name', 'DewPoint Temperature Observation Error') \ + .write_data(dewpointTemperatureOE) + + # Eastward Wind Observation Error + obsspace.create_var('ObsError/windEastward', dtype=uob.dtype, fillval=uob.fill_value) \ + .write_attr('units', 'm s-1') \ + .write_attr('long_name', 'Eastward Wind Observation Error') \ + .write_data(windEastwardOE) + + # Northward Wind Observation Error + obsspace.create_var('ObsError/windNorthward', dtype=vob.dtype, fillval=vob.fill_value) \ + .write_attr('units', 'm s-1') \ + .write_attr('long_name', 'Northward Wind Observation Error') \ + .write_data(windNorthwardOE) + + end_time = time.time() + running_time = end_time - start_time + logger.info(f"Running time for splitting and output IODA: {running_time} seconds") + + logger.info("All Done!") + + +if __name__ == '__main__': + + start_time = time.time() + + parser = argparse.ArgumentParser() + parser.add_argument('-c', '--config', type=str, help='Input JSON configuration', required=True) + 
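For hand testing, the converter takes a small JSON configuration whose keys match the ones read at the top of bufr_to_ioda() (data_format, data_type, cycle_type, cycle_datetime, dump_directory, ioda_directory, and so on). The sketch below writes such a file and invokes the script directly; every value, the bufr2ioda_adpupa.json filename, and the plain python3 call are placeholders for illustration, since in the real workflow the configuration is presumably generated by the calling job.

import json
import subprocess

# Placeholder configuration; keys mirror those read in bufr_to_ioda() above,
# values are illustrative only.
config = {
    "subsets": ["ADPUPA"],
    "data_format": "bufr_d",
    "data_type": "adpupa",
    "data_description": "ADPUPA upper-air profiles",
    "data_provider": "U.S. NOAA",
    "cycle_type": "gdas",
    "cycle_datetime": "2021080100",
    "dump_directory": "/path/to/dump",
    "ioda_directory": "/path/to/ioda",
    "source": "NCEP data tank",
}

with open("bufr2ioda_adpupa.json", "w") as f:
    json.dump(config, f, indent=2)

# Run the converter against the generated config (add -v for debug logging).
subprocess.run(["python3", "bufr2ioda_adpupa.py", "-c", "bufr2ioda_adpupa.json"],
               check=True)

With these placeholder values the script would look for gdas.t00z.adpupa.tm00.bufr_d under /path/to/dump/gdas.20210801/00/atmos and, if found, write gdas.t00z.adpupa.nc into /path/to/ioda.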
parser.add_argument('-v', '--verbose', help='print debug logging information', + action='store_true') + args = parser.parse_args() + + log_level = 'DEBUG' if args.verbose else 'INFO' + logger = Logger('BUFR2IODA_adpupa.py', level=log_level, colored_log=True) + + with open(args.config, "r") as json_file: + config = json.load(json_file) + + bufr_to_ioda(config, logger) + + end_time = time.time() + running_time = end_time - start_time + logger.info(f"Total running time: {running_time} seconds") diff --git a/ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_ahi.py b/ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_ahi.py similarity index 98% rename from ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_ahi.py rename to ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_ahi.py index 3f897e61d..e4260184c 100755 --- a/ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_ahi.py +++ b/ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_ahi.py @@ -391,8 +391,8 @@ def bufr_to_ioda(config, logger): .write_data(ogce2) # Quality: Percent Confidence - Quality Information Without Forecast - obsspace.create_var('MetaData/qualityInformationWithoutForecast', dtype=qifn2.dtype, fillval=qifn2.fill_value) \ - .write_attr('long_name', 'Quality Information Without Forecast') \ + obsspace.create_var('MetaData/qiWithoutForecast', dtype=qifn2.dtype, fillval=qifn2.fill_value) \ + .write_attr('long_name', 'QI Without Forecast') \ .write_data(qifn2) # Wind Computation Method @@ -400,16 +400,6 @@ def bufr_to_ioda(config, logger): .write_attr('long_name', 'Satellite-derived Wind Computation Method') \ .write_data(swcm2) - # ObsType based on computation method/spectral band - obsspace.create_var('ObsType/windEastward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ - .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ - .write_data(obstype2) - - # ObsType based on computation method/spectral band - obsspace.create_var('ObsType/windNorthward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ - .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ - .write_data(obstype2) - # Pressure obsspace.create_var('MetaData/pressure', dtype=pressure2.dtype, fillval=pressure2.fill_value) \ .write_attr('units', 'pa') \ @@ -428,6 +418,16 @@ def bufr_to_ioda(config, logger): .write_attr('long_name', 'Station Elevation') \ .write_data(stnelev2) + # ObsType based on computation method/spectral band + obsspace.create_var('ObsType/windEastward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ + .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ + .write_data(obstype2) + + # ObsType based on computation method/spectral band + obsspace.create_var('ObsType/windNorthward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ + .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ + .write_data(obstype2) + # U-Wind Component obsspace.create_var('ObsValue/windEastward', dtype=uob2.dtype, fillval=wspd2.fill_value) \ .write_attr('units', 'm s-1') \ diff --git a/ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_avhrr.py b/ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_avhrr.py similarity index 98% rename from ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_avhrr.py rename to ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_avhrr.py index 6ad0bee4d..9ad22a55b 100755 --- a/ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_avhrr.py +++ b/ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_avhrr.py @@ -386,8 +386,8 
@@ def bufr_to_ioda(config, logger): .write_data(ogce2) # Quality: Percent Confidence - Quality Information Without Forecast - obsspace.create_var('MetaData/qualityInformationWithoutForecast', dtype=qifn2.dtype, fillval=qifn2.fill_value) \ - .write_attr('long_name', 'Quality Information Without Forecast') \ + obsspace.create_var('MetaData/qiWithoutForecast', dtype=qifn2.dtype, fillval=qifn2.fill_value) \ + .write_attr('long_name', 'QI Without Forecast') \ .write_data(qifn2) # Wind Computation Method @@ -395,16 +395,6 @@ def bufr_to_ioda(config, logger): .write_attr('long_name', 'Satellite-derived Wind Computation Method') \ .write_data(swcm2) - # ObsType based on computation method/spectral band - obsspace.create_var('ObsType/windEastward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ - .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ - .write_data(obstype2) - - # ObsType based on computation method/spectral band - obsspace.create_var('ObsType/windNorthward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ - .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ - .write_data(obstype2) - # Pressure obsspace.create_var('MetaData/pressure', dtype=pressure2.dtype, fillval=pressure2.fill_value) \ .write_attr('units', 'pa') \ @@ -423,6 +413,16 @@ def bufr_to_ioda(config, logger): .write_attr('long_name', 'Station Elevation') \ .write_data(stnelev2) + # ObsType based on computation method/spectral band + obsspace.create_var('ObsType/windEastward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ + .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ + .write_data(obstype2) + + # ObsType based on computation method/spectral band + obsspace.create_var('ObsType/windNorthward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ + .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ + .write_data(obstype2) + # U-Wind Component obsspace.create_var('ObsValue/windEastward', dtype=uob2.dtype, fillval=uob2.fill_value) \ .write_attr('units', 'm s-1') \ diff --git a/ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_goes.py b/ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_goes.py similarity index 98% rename from ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_goes.py rename to ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_goes.py index 594e5f008..7ab41d63b 100755 --- a/ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_goes.py +++ b/ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_goes.py @@ -383,8 +383,8 @@ def bufr_to_ioda(config, logger): .write_data(ogce2) # Quality: Percent Confidence - Quality Information Without Forecast - obsspace.create_var('MetaData/qualityInformationWithoutForecast', dtype=qifn2.dtype, fillval=qifn2.fill_value) \ - .write_attr('long_name', 'Quality Information Without Forecast') \ + obsspace.create_var('MetaData/qiWithoutForecast', dtype=qifn2.dtype, fillval=qifn2.fill_value) \ + .write_attr('long_name', 'QI Without Forecast') \ .write_data(qifn2) # Quality: Percent Confidence - Expected Error @@ -408,16 +408,6 @@ def bufr_to_ioda(config, logger): .write_attr('long_name', 'Wind Height Assignment Method') \ .write_data(eham2) - # ObsType based on computation method/spectral band - obsspace.create_var('ObsType/windEastward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ - .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and 
Spectral Band') \ - .write_data(obstype2) - - # ObsType based on computation method/spectral band - obsspace.create_var('ObsType/windNorthward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ - .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ - .write_data(obstype2) - # Pressure obsspace.create_var('MetaData/pressure', dtype=pressure2.dtype, fillval=pressure2.fill_value) \ .write_attr('units', 'pa') \ @@ -436,6 +426,16 @@ def bufr_to_ioda(config, logger): .write_attr('long_name', 'Station Elevation') \ .write_data(stnelev2) + # ObsType based on computation method/spectral band + obsspace.create_var('ObsType/windEastward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ + .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ + .write_data(obstype2) + + # ObsType based on computation method/spectral band + obsspace.create_var('ObsType/windNorthward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ + .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ + .write_data(obstype2) + # U-Wind Component obsspace.create_var('ObsValue/windEastward', dtype=uob2.dtype, fillval=wspd2.fill_value) \ .write_attr('units', 'm s-1') \ diff --git a/ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_leogeo.py b/ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_leogeo.py new file mode 100755 index 000000000..d879293b6 --- /dev/null +++ b/ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_leogeo.py @@ -0,0 +1,445 @@ +#!/usr/bin/env python3 +import argparse +import numpy as np +import numpy.ma as ma +from pyiodaconv import bufr +import calendar +import json +import time +import math +import datetime +import os +from datetime import datetime +from pyioda import ioda_obs_space as ioda_ospace +from wxflow import Logger + +# ==================================================================== +# Satellite Winds (AMV) BUFR dump file for multi-satellite LEOGEO +# ==================================================================== +# Subset | Spectral Band | Code (002023) | ObsType +# -------------------------------------------------------------------- +# NC005072 | IRLW (Freq < 5E+13) | Method 1 | 255 +# ==================================================================== + +# Define and initialize global variables +global float32_fill_value +global int32_fill_value +global int64_fill_value + +float32_fill_value = np.float32(0) +int32_fill_value = np.int32(0) +int64_fill_value = np.int64(0) + + +def Compute_WindComponents_from_WindDirection_and_WindSpeed(wdir, wspd): + + uob = (-wspd * np.sin(np.radians(wdir))).astype(np.float32) + vob = (-wspd * np.cos(np.radians(wdir))).astype(np.float32) + + return uob, vob + + +def Get_ObsType(swcm, chanfreq): + + obstype = swcm.copy() + + # Use numpy vectorized operations + obstype = np.where(swcm == 1, 255, obstype) # IRLW + + if not np.any(np.isin(obstype, [255])): + raise ValueError("Error: Unassigned ObsType found ... 
") + + return obstype + + +def bufr_to_ioda(config, logger): + + subsets = config["subsets"] + logger.debug(f"Checking subsets = {subsets}") + + # Get parameters from configuration + subsets = config["subsets"] + data_format = config["data_format"] + data_type = config["data_type"] + data_description = config["data_description"] + data_provider = config["data_provider"] + cycle_type = config["cycle_type"] + dump_dir = config["dump_directory"] + ioda_dir = config["ioda_directory"] + cycle = config["cycle_datetime"] + yyyymmdd = cycle[0:8] + hh = cycle[8:10] + + satellite_info_array = config["satellite_info"] + sensor_name = config["sensor_info"]["sensor_name"] + sensor_full_name = config["sensor_info"]["sensor_full_name"] + sensor_id = config["sensor_info"]["sensor_id"] + + # Get derived parameters + yyyymmdd = cycle[0:8] + hh = cycle[8:10] + reference_time = datetime.strptime(cycle, "%Y%m%d%H") + reference_time = reference_time.strftime("%Y-%m-%dT%H:%M:%SZ") + + # General informaton + converter = 'BUFR to IODA Converter' + process_level = 'Level-2' + platform_description = 'TERRA/AQUA' + sensor_description = 'Moderate Resolution Imaging Spectroradiometer' + + logger.info(f'sensor_name = {sensor_name}') + logger.info(f'sensor_full_name = {sensor_full_name}') + logger.info(f'sensor_id = {sensor_id}') + logger.info(f'reference_time = {reference_time}') + + bufrfile = f"{cycle_type}.t{hh}z.{data_type}.tm00.{data_format}" + DATA_PATH = os.path.join(dump_dir, f"{cycle_type}.{yyyymmdd}", str(hh), 'atmos', bufrfile) + if not os.path.isfile(DATA_PATH): + logger.info(f"DATA_PATH {DATA_PATH} does not exist") + return + logger.debug(f"The DATA_PATH is: {DATA_PATH}") + + # ============================================ + # Make the QuerySet for all the data we want + # ============================================ + start_time = time.time() + + logger.info('Making QuerySet') + q = bufr.QuerySet(subsets) + + # MetaData + q.add('latitude', '*/CLATH') + q.add('longitude', '*/CLONH') + q.add('satelliteId', '*/SAID') + q.add('year', '*/YEAR') + q.add('month', '*/MNTH') + q.add('day', '*/DAYS') + q.add('hour', '*/HOUR') + q.add('minute', '*/MINU') + q.add('second', '*/SECO') + q.add('satelliteZenithAngle', '*/SAZA') + q.add('sensorCentralFrequency', '*/SCCF') + q.add('pressure', '*/PRLC[1]') + + # Processing Center + # GCLONG takes place of OGCE in this subset + q.add('dataProviderOrigin', '*/GCLONG') + # There are 3 replications of GNAP, GNAP[1] appears to have values of 1 == EUMETSAT QI without forecast + q.add('windGeneratingApplication', 'GNAP[1]') + + # Quality Infomation (Quality Indicator w/o forecast) + # There are 3 replications of PCCF, PCCF[1] corresponds to GNAP[1] == EUMETSAT QI without forecast + q.add('qualityInformationWithoutForecast', '*/LGRSQ4[1]/PCCF') + + # Wind Retrieval Method Information + q.add('windComputationMethod', '*/SWCM') + + # ObsValue + q.add('windDirection', '*/WDIR') + q.add('windSpeed', '*/WSPD') + + end_time = time.time() + running_time = end_time - start_time + logger.debug(f'Processing time for making QuerySet : {running_time} seconds') + + # ============================================================== + # Open the BUFR file and execute the QuerySet to get ResultSet + # Use the ResultSet returned to get numpy arrays of the data + # ============================================================== + start_time = time.time() + + logger.info('Executing QuerySet to get ResultSet') + with bufr.File(DATA_PATH) as f: + try: + r = f.execute(q) + except Exception as err: + 
logger.info(f'Return with {err}') + return + + # MetaData + satid = r.get('satelliteId') + year = r.get('year') + month = r.get('month') + day = r.get('day') + hour = r.get('hour') + minute = r.get('minute') + second = r.get('second') + lat = r.get('latitude') + lon = r.get('longitude') + satzenang = r.get('satelliteZenithAngle') + pressure = r.get('pressure', type='float') + chanfreq = r.get('sensorCentralFrequency', type='float') + + # Processing Center + ogce = r.get('dataProviderOrigin') # drawn from GLONGC mnemonic, but OGCE == GLONGC + gnap = r.get('windGeneratingApplication') + + # Quality Information + qifn = r.get('qualityInformationWithoutForecast', type='float') + + # Wind Retrieval Method Information + swcm = r.get('windComputationMethod') + # ObsValue + # Wind direction and Speed + wdir = r.get('windDirection', type='float') + wspd = r.get('windSpeed') + + # DateTime: seconds since Epoch time + # IODA has no support for numpy datetime arrays dtype=datetime64[s] + timestamp = r.get_datetime('year', 'month', 'day', 'hour', 'minute', 'second').astype(np.int64) + + # Check BUFR variable generic dimension and type + + # Global variables declaration + # Set global fill values + float32_fill_value = satzenang.fill_value + int32_fill_value = satid.fill_value + int64_fill_value = timestamp.fill_value.astype(np.int64) + + end_time = time.time() + running_time = end_time - start_time + logger.info(f'Processing time for executing QuerySet to get ResultSet : {running_time} seconds') + + # ========================= + # Create derived variables + # ========================= + start_time = time.time() + + logger.info('Creating derived variables') + logger.debug('Creating derived variables - wind components (uob and vob)') + + uob, vob = Compute_WindComponents_from_WindDirection_and_WindSpeed(wdir, wspd) + + logger.debug(f' uob min/max = {uob.min()} {uob.max()}') + logger.debug(f' vob min/max = {vob.min()} {vob.max()}') + + obstype = Get_ObsType(swcm, chanfreq) + + height = np.full_like(pressure, fill_value=pressure.fill_value, dtype=np.float32) + stnelev = np.full_like(pressure, fill_value=pressure.fill_value, dtype=np.float32) + + end_time = time.time() + running_time = end_time - start_time + logger.info(f'Processing time for creating derived variables : {running_time} seconds') + + # ===================================== + # Split output based on satellite id + # Create IODA ObsSpace + # Write IODA output + # ===================================== + logger.info('Create IODA ObsSpace and Write IODA output based on satellite ID') + + # Find unique satellite identifiers in data to process + unique_satids = np.unique(satid) + logger.info(f'Number of Unique satellite identifiers: {len(unique_satids)}') + logger.info(f'Unique satellite identifiers: {unique_satids}') + + logger.debug(f'Loop through unique satellite identifier {unique_satids}') + total_ob_processed = 0 + for sat in unique_satids.tolist(): + start_time = time.time() + + matched = False + for satellite_info in satellite_info_array: + if (satellite_info["satellite_id"] == sat): + matched = True + satellite_id = satellite_info["satellite_id"] + satellite_name = satellite_info["satellite_name"] + satinst = sensor_name.lower()+'_'+satellite_name.lower() + logger.debug(f'Split data for {satinst} satid = {sat}') + + if matched: + + # Define a boolean mask to subset data from the original data object + mask = satid == sat + # MetaData + lon2 = lon[mask] + lat2 = lat[mask] + timestamp2 = timestamp[mask] + satid2 = satid[mask] + satzenang2 = 
satzenang[mask] + chanfreq2 = chanfreq[mask] + obstype2 = obstype[mask] + pressure2 = pressure[mask] + height2 = height[mask] + stnelev2 = stnelev[mask] + + # Processing Center + ogce2 = ogce[mask] + + # QC Info + qifn2 = qifn[mask] + + # Method + swcm2 = swcm[mask] + + # ObsValue + wdir2 = wdir[mask] + wspd2 = wspd[mask] + uob2 = uob[mask] + vob2 = vob[mask] + + # Timestamp Range + timestamp2_min = datetime.fromtimestamp(timestamp2.min()) + timestamp2_max = datetime.fromtimestamp(timestamp2.max()) + + # Check unique observation time + unique_timestamp2 = np.unique(timestamp2) + logger.debug(f'Processing output for satid {sat}') + + # Create the dimensions + dims = { + 'Location': np.arange(0, wdir2.shape[0]) + } + + # Create IODA ObsSpace + iodafile = f"{cycle_type}.t{hh}z.{data_type}.{satinst}.tm00.nc" + OUTPUT_PATH = os.path.join(ioda_dir, iodafile) + logger.info(f'Create output file : {OUTPUT_PATH}') + obsspace = ioda_ospace.ObsSpace(OUTPUT_PATH, mode='w', dim_dict=dims) + + # Create Global attributes + logger.debug('Write global attributes') + obsspace.write_attr('Converter', converter) + obsspace.write_attr('sourceFiles', bufrfile) + obsspace.write_attr('dataProviderOrigin', data_provider) + obsspace.write_attr('description', data_description) + obsspace.write_attr('datetimeReference', reference_time) + obsspace.write_attr('datetimeRange', [str(timestamp2_min), str(timestamp2_max)]) + obsspace.write_attr('sensor', sensor_id) + obsspace.write_attr('platform', satellite_id) + obsspace.write_attr('platformCommonName', satellite_name) + obsspace.write_attr('sensorCommonName', sensor_name) + obsspace.write_attr('processingLevel', process_level) + obsspace.write_attr('platformLongDescription', platform_description) + obsspace.write_attr('sensorLongDescription', sensor_description) + + # Create IODA variables + logger.debug('Write variables: name, type, units, and attributes') + # Longitude + obsspace.create_var('MetaData/longitude', dtype=lon2.dtype, fillval=lon2.fill_value) \ + .write_attr('units', 'degrees_east') \ + .write_attr('valid_range', np.array([-180, 180], dtype=np.float32)) \ + .write_attr('long_name', 'Longitude') \ + .write_data(lon2) + + # Latitude + obsspace.create_var('MetaData/latitude', dtype=lat.dtype, fillval=lat2.fill_value) \ + .write_attr('units', 'degrees_north') \ + .write_attr('valid_range', np.array([-90, 90], dtype=np.float32)) \ + .write_attr('long_name', 'Latitude') \ + .write_data(lat2) + + # Datetime + obsspace.create_var('MetaData/dateTime', dtype=np.int64, fillval=int64_fill_value) \ + .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \ + .write_attr('long_name', 'Datetime') \ + .write_data(timestamp2) + + # Satellite Identifier + obsspace.create_var('MetaData/satelliteIdentifier', dtype=satid2.dtype, fillval=satid2.fill_value) \ + .write_attr('long_name', 'Satellite Identifier') \ + .write_data(satid2) + + # Sensor Zenith Angle + obsspace.create_var('MetaData/satelliteZenithAngle', dtype=satzenang2.dtype, fillval=satzenang2.fill_value) \ + .write_attr('units', 'degree') \ + .write_attr('valid_range', np.array([0, 90], dtype=np.float32)) \ + .write_attr('long_name', 'Satellite Zenith Angle') \ + .write_data(satzenang2) + + # Sensor Centrall Frequency + obsspace.create_var('MetaData/sensorCentralFrequency', dtype=chanfreq2.dtype, fillval=chanfreq2.fill_value) \ + .write_attr('units', 'Hz') \ + .write_attr('long_name', 'Satellite Channel Center Frequency') \ + .write_data(chanfreq2) + + # Data Provider + 
obsspace.create_var('MetaData/dataProviderOrigin', dtype=ogce2.dtype, fillval=ogce2.fill_value) \ + .write_attr('long_name', 'Identification of Originating/Generating Center') \ + .write_data(ogce2) + + # Quality: Percent Confidence - Quality Information Without Forecast + obsspace.create_var('MetaData/qiWithoutForecast', dtype=qifn2.dtype, fillval=qifn2.fill_value) \ + .write_attr('long_name', 'Quality Information Without Forecast') \ + .write_data(qifn2) + + # Wind Computation Method + obsspace.create_var('MetaData/windComputationMethod', dtype=swcm2.dtype, fillval=swcm2.fill_value) \ + .write_attr('long_name', 'Satellite-derived Wind Computation Method') \ + .write_data(swcm2) + + # Pressure + obsspace.create_var('MetaData/pressure', dtype=pressure2.dtype, fillval=pressure2.fill_value) \ + .write_attr('units', 'pa') \ + .write_attr('long_name', 'Pressure') \ + .write_data(pressure2) + + # Height (mimic prepbufr) + obsspace.create_var('MetaData/height', dtype=height2.dtype, fillval=height2.fill_value) \ + .write_attr('units', 'm') \ + .write_attr('long_name', 'Height of Observation') \ + .write_data(height2) + + # Station Elevation (mimic prepbufr) + obsspace.create_var('MetaData/stationElevation', dtype=stnelev2.dtype, fillval=stnelev2.fill_value) \ + .write_attr('units', 'm') \ + .write_attr('long_name', 'Station Elevation') \ + .write_data(stnelev2) + + # ObsType based on computation method/spectral band + obsspace.create_var('ObsType/windEastward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ + .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ + .write_data(obstype2) + + # ObsType based on computation method/spectral band + obsspace.create_var('ObsType/windNorthward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ + .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ + .write_data(obstype2) + + # U-Wind Component + obsspace.create_var('ObsValue/windEastward', dtype=uob2.dtype, fillval=uob2.fill_value) \ + .write_attr('units', 'm s-1') \ + .write_attr('long_name', 'Eastward Wind Component') \ + .write_data(uob2) + + # V-Wind Component + obsspace.create_var('ObsValue/windNorthward', dtype=vob2.dtype, fillval=vob2.fill_value) \ + .write_attr('units', 'm s-1') \ + .write_attr('long_name', 'Northward Wind Component') \ + .write_data(vob2) + + end_time = time.time() + running_time = end_time - start_time + total_ob_processed += len(satid2) + logger.debug(f'Number of observation processed : {len(satid2)}') + logger.debug(f'Processing time for splitting and output IODA for {satinst} : {running_time} seconds') + + else: + logger.info(f"Do not find this satellite id in the configuration: satid = {sat}") + + logger.info("All Done!") + logger.info(f'Total number of observation processed : {total_ob_processed}') + + +if __name__ == '__main__': + + start_time = time.time() + + parser = argparse.ArgumentParser() + parser.add_argument('-c', '--config', type=str, help='Input JSON configuration', required=True) + parser.add_argument('-v', '--verbose', help='print debug logging information', + action='store_true') + args = parser.parse_args() + + log_level = 'DEBUG' if args.verbose else 'INFO' + logger = Logger('BUFR2IODA_satwind_amv_ahi.py', level=log_level, colored_log=True) + + with open(args.config, "r") as json_file: + config = json.load(json_file) + + bufr_to_ioda(config, logger) + + end_time = time.time() + running_time = end_time - start_time + 
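+    # Illustrative check (hypothetical values) of the wind-component convention
+    # implemented by Compute_WindComponents_from_WindDirection_and_WindSpeed above,
+    # where wdir is the direction the wind blows FROM: a wind from the north
+    # (wdir=0) gives u=0, v=-wspd, and a wind from the west (wdir=270) gives
+    # u=+wspd, v=0.
+    # >>> u, v = Compute_WindComponents_from_WindDirection_and_WindSpeed(
+    # ...     np.array([0.0, 270.0], dtype=np.float32),
+    # ...     np.array([10.0, 10.0], dtype=np.float32))
+    # >>> bool(np.allclose(u, [0.0, 10.0], atol=1e-4) and np.allclose(v, [-10.0, 0.0], atol=1e-4))
+    # True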
logger.info(f"Total running time: {running_time} seconds") diff --git a/ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_modis.py b/ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_modis.py similarity index 98% rename from ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_modis.py rename to ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_modis.py index b67086b76..ff9706b8b 100755 --- a/ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_modis.py +++ b/ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_modis.py @@ -389,8 +389,8 @@ def bufr_to_ioda(config, logger): .write_data(ogce2) # Quality: Percent Confidence - Quality Information Without Forecast - obsspace.create_var('MetaData/qualityInformationWithoutForecast', dtype=qifn2.dtype, fillval=qifn2.fill_value) \ - .write_attr('long_name', 'Quality Information Without Forecast') \ + obsspace.create_var('MetaData/qiWithoutForecast', dtype=qifn2.dtype, fillval=qifn2.fill_value) \ + .write_attr('long_name', 'QI Without Forecast') \ .write_data(qifn2) # Wind Computation Method diff --git a/ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_seviri.py b/ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_seviri.py similarity index 98% rename from ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_seviri.py rename to ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_seviri.py index dc873d962..9f2db7fdd 100755 --- a/ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_seviri.py +++ b/ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_seviri.py @@ -364,8 +364,8 @@ def bufr_to_ioda(config, logger): .write_data(ogce2) # Quality: Percent Confidence - Quality Information Without Forecast - obsspace.create_var('MetaData/qualityInformationWithoutForecast', dtype=qifn2.dtype, fillval=qifn2.fill_value) \ - .write_attr('long_name', 'Quality Information Without Forecast') \ + obsspace.create_var('MetaData/qiWithoutForecast', dtype=qifn2.dtype, fillval=qifn2.fill_value) \ + .write_attr('long_name', 'QI Without Forecast') \ .write_data(qifn2) # Wind Computation Method @@ -373,16 +373,6 @@ def bufr_to_ioda(config, logger): .write_attr('long_name', 'Satellite-derived Wind Computation Method') \ .write_data(swcm2) - # ObsType based on computation method/spectral band - obsspace.create_var('ObsType/windEastward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ - .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ - .write_data(obstype2) - - # ObsType based on computation method/spectral band - obsspace.create_var('ObsType/windNorthward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ - .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ - .write_data(obstype2) - # Pressure obsspace.create_var('MetaData/pressure', dtype=pressure2.dtype, fillval=pressure2.fill_value) \ .write_attr('units', 'pa') \ @@ -401,6 +391,16 @@ def bufr_to_ioda(config, logger): .write_attr('long_name', 'Station Elevation') \ .write_data(stnelev2) + # ObsType based on computation method/spectral band + obsspace.create_var('ObsType/windEastward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ + .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ + .write_data(obstype2) + + # ObsType based on computation method/spectral band + obsspace.create_var('ObsType/windNorthward', dtype=obstype2.dtype, fillval=swcm2.fill_value) \ + .write_attr('long_name', 'Observation Type based on Satellite-derived Wind Computation Method and Spectral Band') \ + .write_data(obstype2) + # U-Wind Component 
obsspace.create_var('ObsValue/windEastward', dtype=uob2.dtype, fillval=wspd2.fill_value) \ .write_attr('units', 'm s-1') \ diff --git a/ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_viirs.py b/ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_viirs.py similarity index 98% rename from ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_viirs.py rename to ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_viirs.py index 18f87db04..4941ff977 100755 --- a/ush/ioda/bufr2ioda/bufr2ioda_satwind_amv_viirs.py +++ b/ush/ioda/bufr2ioda/bufr2ioda_satwnd_amv_viirs.py @@ -385,8 +385,8 @@ def bufr_to_ioda(config, logger): .write_data(ogce2) # Quality: Percent Confidence - Quality Information Without Forecast - obsspace.create_var('MetaData/qualityInformationWithoutForecast', dtype=qifn2.dtype, fillval=qifn2.fill_value) \ - .write_attr('long_name', 'Quality Information Without Forecast') \ + obsspace.create_var('MetaData/qiWithoutForecast', dtype=qifn2.dtype, fillval=qifn2.fill_value) \ + .write_attr('long_name', 'QI Without Forecast') \ .write_data(qifn2) # Wind Computation Method diff --git a/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profiles_glider.py b/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profile_glider.py similarity index 99% rename from ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profiles_glider.py rename to ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profile_glider.py index 59dbb266d..039966fa4 100755 --- a/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profiles_glider.py +++ b/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profile_glider.py @@ -195,7 +195,7 @@ def bufr_to_ioda(config, logger): # Create the dimensions dims = {'Location': np.arange(0, lat.shape[0])} - iodafile = f"{cycle_type}.t{hh}z.{data_type}_profiles.{data_format}.nc" + iodafile = f"{cycle_type}.t{hh}z.{data_type}_profiles.{data_format}.nc4" OUTPUT_PATH = os.path.join(ioda_dir, iodafile) logger.debug(f" ... ... 
Create OUTPUT file: {OUTPUT_PATH}") diff --git a/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_marinemammals_profiles.py b/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profile_marinemammal.py similarity index 99% rename from ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_marinemammals_profiles.py rename to ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profile_marinemammal.py index e7341b76e..e7b908e2d 100755 --- a/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_marinemammals_profiles.py +++ b/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profile_marinemammal.py @@ -306,7 +306,7 @@ def bufr_to_ioda(config, logger): args = parser.parse_args() log_level = 'DEBUG' if args.verbose else 'INFO' - logger = Logger('bufr2ioda_insitu_marinemammals_profiles.py', level=log_level, colored_log=True) + logger = Logger('bufr2ioda_insitu_profile_marinemammal.py', level=log_level, colored_log=True) with open(args.config, "r") as json_file: config = json.load(json_file) diff --git a/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profiles_xbtctd.py b/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profile_xbtctd.py similarity index 99% rename from ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profiles_xbtctd.py rename to ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profile_xbtctd.py index 8923a5aad..ec0376582 100755 --- a/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profiles_xbtctd.py +++ b/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profile_xbtctd.py @@ -170,7 +170,7 @@ def bufr_to_ioda(config, logger): # Create the dimensions dims = {'Location': np.arange(0, lat.shape[0])} - iodafile = f"{cycle_type}.t{hh}z.{data_type}_profiles.{data_format}.nc" + iodafile = f"{cycle_type}.t{hh}z.{data_type}_profiles.{data_format}.nc4" OUTPUT_PATH = os.path.join(ioda_dir, iodafile) logger.debug(f" ... ... Create OUTPUT file: {OUTPUT_PATH}") diff --git a/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profiles_tesac.py b/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profiles_tesac.py deleted file mode 100755 index 23812a515..000000000 --- a/ush/ioda/bufr2ioda/marine/bufr2ioda_insitu_profiles_tesac.py +++ /dev/null @@ -1,319 +0,0 @@ -#!/usr/bin/env python3 -# (C) Copyright 2024 NOAA/NWS/NCEP/EMC -# -# This software is licensed under the terms of the Apache Licence Version 2.0 -# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
- -import sys -import numpy as np -import numpy.ma as ma -import os -import argparse -import math -import calendar -import time -import copy -from datetime import datetime -import json -from pyiodaconv import bufr -from collections import namedtuple -from pyioda import ioda_obs_space as ioda_ospace -from wxflow import Logger -import warnings -# suppress warnings -warnings.filterwarnings('ignore') - - -def Compute_sequenceNumber(lon): - - lon_u, seqNum = np.unique(lon, return_inverse=True) - seqNum = seqNum.astype(np.int32) - logger.debug(f"Len of Sequence Number: {len(seqNum)}") - - return seqNum - - -def bufr_to_ioda(config, logger): - - subsets = config["subsets"] - logger.debug(f"Checking subsets = {subsets}") - - # Get parameters from configuration - data_format = config["data_format"] - source = config["source"] - data_type = config["data_type"] - data_description = config["data_description"] - data_provider = config["data_provider"] - cycle_type = config["cycle_type"] - cycle_datetime = config["cycle_datetime"] - dump_dir = config["dump_directory"] - ioda_dir = config["ioda_directory"] - cycle = config["cycle_datetime"] - - yyyymmdd = cycle[0:8] - hh = cycle[8:10] - - # General Information - converter = 'BUFR to IODA Converter' - platform_description = 'Profiles from TESAC: temperature and salinity' - - bufrfile = f"{cycle_datetime}-{cycle_type}.t{hh}z.{data_format}.tm00.bufr_d" - DATA_PATH = os.path.join(dump_dir, bufrfile) - logger.debug(f"{bufrfile}, {DATA_PATH}") - - # ========================================== - # Make the QuerySet for all the data we want - # ========================================== - start_time = time.time() - - logger.debug(f"Making QuerySet ...") - q = bufr.QuerySet() - - # MetaData - q.add('year', '*/YEAR') - q.add('month', '*/MNTH') - q.add('day', '*/DAYS') - q.add('hour', '*/HOUR') - q.add('minute', '*/MINU') - q.add('ryear', '*/RCYR') - q.add('rmonth', '*/RCMO') - q.add('rday', '*/RCDY') - q.add('rhour', '*/RCHR') - q.add('rminute', '*/RCMI') - q.add('stationID', '*/RPID') - q.add('latitude', '*/CLAT') - q.add('longitude', '*/CLON') - q.add('depth', '*/BTOCN/DBSS') - - # ObsValue - q.add('temp', '*/BTOCN/STMP') - q.add('saln', '*/BTOCN/SALN') - - end_time = time.time() - running_time = end_time - start_time - logger.debug(f"Running time for making QuerySet: {running_time} seconds") - - # ========================================================== - # Open the BUFR file and execute the QuerySet - # Use the ResultSet returned to get numpy arrays of the data - # ========================================================== - start_time = time.time() - logger.debug(f"Executing QuerySet to get ResultSet ...") - with bufr.File(DATA_PATH) as f: - r = f.execute(q) - - # MetaData - logger.debug(f" ... Executing QuerySet: get MetaData ...") - dateTime = r.get_datetime('year', 'month', 'day', 'hour', 'minute', group_by='depth') - dateTime = dateTime.astype(np.int64) - rcptdateTime = r.get_datetime('ryear', 'rmonth', 'rday', 'rhour', 'rminute', group_by='depth') - rcptdateTime = rcptdateTime.astype(np.int64) - stationID = r.get('stationID', group_by='depth') - lat = r.get('latitude', group_by='depth') - lon = r.get('longitude', group_by='depth') - depth = r.get('depth', group_by='depth') - - # ObsValue - logger.debug(f" ... 
Executing QuerySet: get ObsValue ...") - temp = r.get('temp', group_by='depth') - temp -= 273.15 - saln = r.get('saln', group_by='depth') - - # Add mask based on min, max values - mask = ((temp > -10.0) & (temp <= 50.0)) & ((saln >= 0.0) & (saln <= 45.0)) - lat = lat[mask] - lon = lon[mask] - depth = depth[mask] - stationID = stationID[mask] - dateTime = dateTime[mask] - rcptdateTime = rcptdateTime[mask] - temp = temp[mask] - saln = saln[mask] - - logger.debug(f"Get sequenceNumber based on unique longitude...") - seqNum = Compute_sequenceNumber(lon) - - # ================================== - # Separate TESAC profiles tesac tank - # ================================== - logger.debug(f"Creating the mask for TESAC floats based on station ID ...") - - digit_mask = [item.isdigit() for item in stationID] - indices_true = [index for index, value in enumerate(digit_mask) if value] - - # Apply index - stationID = stationID[indices_true] - lat = lat[indices_true] - lon = lon[indices_true] - depth = depth[indices_true] - temp = temp[indices_true] - saln = saln[indices_true] - seqNum = seqNum[indices_true] - dateTime = dateTime[indices_true] - rcptdateTime = rcptdateTime[indices_true] - - # ObsError - logger.debug(f"Generating ObsError array with constant value (instrument error)...") - ObsError_temp = np.float32(np.ma.masked_array(np.full((len(indices_true)), 0.02))) - ObsError_saln = np.float32(np.ma.masked_array(np.full((len(indices_true)), 0.01))) - - # PreQC - logger.debug(f"Generating PreQC array with 0...") - PreQC = (np.ma.masked_array(np.full((len(indices_true)), 0))).astype(np.int32) - - logger.debug(f" ... Executing QuerySet: Done!") - - logger.debug(f" ... Executing QuerySet: Check BUFR variable generic \ - dimension and type ...") - - # ================================================== - # Check values of BUFR variables, dimension and type - # ================================================== - logger.debug(f" temp min, max, length, dtype = {temp.min()}, {temp.max()}, {len(temp)}, {temp.dtype}") - logger.debug(f" saln min, max, length, dtype = {saln.min()}, {saln.max()}, {len(saln)}, {saln.dtype}") - logger.debug(f" lon min, max, length, dtype = {lon.min()}, {lon.max()}, {len(lon)}, {lon.dtype}") - logger.debug(f" lat min, max, length, dtype = {lat.min()}, {lat.max()}, {len(lat)}, {lat.dtype}") - logger.debug(f" depth min, max, length, dtype = {depth.min()}, {depth.max()}, {len(depth)}, {depth.dtype}") - logger.debug(f" PreQC min, max, length, dtype = {PreQC.min()}, {PreQC.max()}, {len(PreQC)}, {PreQC.dtype}") - logger.debug(f" ObsError_temp min, max, length, dtype = {ObsError_temp.min()}, {ObsError_temp.max()}, {len(ObsError_temp)}, {ObsError_temp.dtype}") - logger.debug(f" ObsError_saln min, max, length, dtype = {ObsError_saln.min()}, {ObsError_saln.max()}, {len(ObsError_saln)}, {ObsError_saln.dtype}") - logger.debug(f" stationID shape, dtype = {stationID.shape}, {stationID.astype(str).dtype}") - logger.debug(f" dateTime shape, dtype = {dateTime.shape}, {dateTime.dtype}") - logger.debug(f" rcptdateTime shape, dytpe = {rcptdateTime.shape}, {rcptdateTime.dtype}") - logger.debug(f" sequence Num shape, dtype = {seqNum.shape}, {seqNum.dtype}") - - # ===================================== - # Create IODA ObsSpace - # Write IODA output - # ===================================== - - # Create the dimensions - dims = {'Location': np.arange(0, lat.shape[0])} - - iodafile = f"{cycle_type}.t{hh}z.insitu_profile_{data_format}.{cycle_datetime}.nc4" - OUTPUT_PATH = os.path.join(ioda_dir, iodafile) - 
logger.debug(f" ... ... Create OUTPUT file: {OUTPUT_PATH}") - - path, fname = os.path.split(OUTPUT_PATH) - if path and not os.path.exists(path): - os.makedirs(path) - - obsspace = ioda_ospace.ObsSpace(OUTPUT_PATH, mode='w', dim_dict=dims) - - # Create Global attributes - logger.debug(f" ... ... Create global attributes") - obsspace.write_attr('Converter', converter) - obsspace.write_attr('source', source) - obsspace.write_attr('sourceFiles', bufrfile) - obsspace.write_attr('dataProviderOrigin', data_provider) - obsspace.write_attr('description', data_description) - obsspace.write_attr('datetimeRange', [str(dateTime.min()), str(dateTime.max())]) - obsspace.write_attr('platformLongDescription', platform_description) - - # Create IODA variables - logger.debug(f" ... ... Create variables: name, type, units, and attributes") - - # Datetime - obsspace.create_var('MetaData/dateTime', dtype=dateTime.dtype, fillval=dateTime.fill_value) \ - .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \ - .write_attr('long_name', 'Datetime') \ - .write_data(dateTime) - - # rcptDatetime - obsspace.create_var('MetaData/rcptdateTime', dtype=dateTime.dtype, fillval=dateTime.fill_value) \ - .write_attr('units', 'seconds since 1970-01-01T00:00:00Z') \ - .write_attr('long_name', 'receipt Datetime') \ - .write_data(rcptdateTime) - - # Longitude - obsspace.create_var('MetaData/longitude', dtype=lon.dtype, fillval=lon.fill_value) \ - .write_attr('units', 'degrees_east') \ - .write_attr('valid_range', np.array([-180, 180], dtype=np.float32)) \ - .write_attr('long_name', 'Longitude') \ - .write_data(lon) - - # Latitude - obsspace.create_var('MetaData/latitude', dtype=lat.dtype, fillval=lat.fill_value) \ - .write_attr('units', 'degrees_north') \ - .write_attr('valid_range', np.array([-90, 90], dtype=np.float32)) \ - .write_attr('long_name', 'Latitude') \ - .write_data(lat) - - # Station Identification - obsspace.create_var('MetaData/stationID', dtype=stationID.dtype, fillval=stationID.fill_value) \ - .write_attr('long_name', 'Station Identification') \ - .write_data(stationID) - - # Depth - obsspace.create_var('MetaData/depth', dtype=depth.dtype, fillval=depth.fill_value) \ - .write_attr('units', 'm') \ - .write_attr('long_name', 'Water depth') \ - .write_data(depth) - - # Sequence Number - obsspace.create_var('MetaData/sequenceNumber', dtype=PreQC.dtype, fillval=PreQC.fill_value) \ - .write_attr('long_name', 'Sequence Number') \ - .write_data(seqNum) - - # PreQC - obsspace.create_var('PreQC/waterTemperature', dtype=PreQC.dtype, fillval=PreQC.fill_value) \ - .write_attr('long_name', 'PreQC') \ - .write_data(PreQC) - - obsspace.create_var('PreQC/salinity', dtype=PreQC.dtype, fillval=PreQC.fill_value) \ - .write_attr('long_name', 'PreQC') \ - .write_data(PreQC) - - # ObsError - obsspace.create_var('ObsError/waterTemperature', dtype=ObsError_temp.dtype, fillval=ObsError_temp.fill_value) \ - .write_attr('units', 'degC') \ - .write_attr('long_name', 'ObsError') \ - .write_data(ObsError_temp) - - obsspace.create_var('ObsError/salinity', dtype=ObsError_saln.dtype, fillval=ObsError_saln.fill_value) \ - .write_attr('units', 'psu') \ - .write_attr('long_name', 'ObsError') \ - .write_data(ObsError_saln) - - # ObsValue - obsspace.create_var('ObsValue/waterTemperature', dtype=temp.dtype, fillval=temp.fill_value) \ - .write_attr('units', 'degC') \ - .write_attr('valid_range', np.array([-10.0, 50.0], dtype=np.float32)) \ - .write_attr('long_name', 'water Temperature') \ - .write_data(temp) - - 
obsspace.create_var('ObsValue/salinity', dtype=saln.dtype, fillval=saln.fill_value) \ - .write_attr('units', 'psu') \ - .write_attr('valid_range', np.array([0.0, 45.0], dtype=np.float32)) \ - .write_attr('long_name', 'salinity') \ - .write_data(saln) - - end_time = time.time() - running_time = end_time - start_time - logger.debug(f"Running time for splitting and output IODA: {running_time} \ - seconds") - - logger.debug(f"All Done!") - - -if __name__ == '__main__': - - start_time = time.time() - - parser = argparse.ArgumentParser() - parser.add_argument('-c', '--config', type=str, help='Input JSON configuration', required=True) - parser.add_argument('-v', '--verbose', help='print debug logging information', - action='store_true') - args = parser.parse_args() - - log_level = 'DEBUG' if args.verbose else 'INFO' - logger = Logger('bufr2ioda_insitu_profiles_tesac.py', level=log_level, colored_log=True) - - with open(args.config, "r") as json_file: - config = json.load(json_file) - - bufr_to_ioda(config, logger) - - end_time = time.time() - running_time = end_time - start_time - logger.debug(f"Total running time: {running_time} seconds") diff --git a/ush/module-setup.sh b/ush/module-setup.sh index 6a2aabf73..a9b87e9e4 100755 --- a/ush/module-setup.sh +++ b/ush/module-setup.sh @@ -63,33 +63,8 @@ elif [[ $MACHINE_ID = gaea* ]] ; then # the module command fails. Hence we actually have to source # /etc/profile here. source /etc/profile - __ms_source_etc_profile=yes - else - __ms_source_etc_profile=no - fi - module purge - # clean up after purge - unset _LMFILES_ - unset _LMFILES_000 - unset _LMFILES_001 - unset LOADEDMODULES - module load modules - if [[ -d /opt/cray/ari/modulefiles ]] ; then - module use -a /opt/cray/ari/modulefiles - fi - if [[ -d /opt/cray/pe/ari/modulefiles ]] ; then - module use -a /opt/cray/pe/ari/modulefiles - fi - if [[ -d /opt/cray/pe/craype/default/modulefiles ]] ; then - module use -a /opt/cray/pe/craype/default/modulefiles - fi - if [[ -s /etc/opt/cray/pe/admin-pe/site-config ]] ; then - source /etc/opt/cray/pe/admin-pe/site-config - fi - if [[ "$__ms_source_etc_profile" == yes ]] ; then - source /etc/profile - unset __ms_source_etc_profile fi + module reset elif [[ $MACHINE_ID = expanse* ]]; then # We are on SDSC Expanse diff --git a/ush/soca/run_jjobs.py b/ush/soca/run_jjobs.py index 83fe4602e..223ce0cc4 100755 --- a/ush/soca/run_jjobs.py +++ b/ush/soca/run_jjobs.py @@ -6,7 +6,7 @@ import argparse from datetime import datetime, timedelta -machines = {"container", "hera", "orion", "hercules"} +machines = {"container", "hera", "orion", "hercules", "wcoss2"} # Assume the default conda environement is gdassapp ENVS = {'JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY': 'eva'} @@ -156,10 +156,9 @@ def _conda_envs(self, jjob): self.f.write(f"set +u \n") self.f.write(f"conda activate {ENVS[jjob]} \n") self.f.write(f"set -u \n") - elif self.machine == "hera": + elif self.machine in ["hera", "wcoss2"]: if jjob in ENVS: - self.f.write(f"module unload GDAS \n") - self.f.write(f"module load {ENVS[jjob].upper()/{self.machine}} \n") + self.f.write(f"module load {ENVS[jjob].upper()}/{self.machine} \n") def precom(self, com, tmpl): cmd = f"RUN={self.RUN} YMD={self.gPDY} HH={self.gcyc} declare_from_tmpl -xr {com}:{tmpl}" diff --git a/ush/soca/soca_vrfy.py b/ush/soca/soca_vrfy.py index 0741e9d28..d80a7782e 100755 --- a/ush/soca/soca_vrfy.py +++ b/ush/soca/soca_vrfy.py @@ -127,7 +127,7 @@ def plotZonalSlice(config): data = xr.open_dataset(config['fields file']) lat_index = 
np.argmin(np.array(np.abs(np.squeeze(grid.lat)[:, 0]-lat))) slice_data = np.squeeze(np.array(data[variable]))[:, lat_index, :] - depth = np.squeeze(np.array(grid['h']))[:, lat_index, :] + depth = np.squeeze(np.array(data['h']))[:, lat_index, :] depth[np.where(np.abs(depth) > 10000.0)] = 0.0 depth = np.cumsum(depth, axis=0) bounds = config['bounds'] @@ -161,7 +161,7 @@ def plotMeridionalSlice(config): data = xr.open_dataset(config['fields file']) lon_index = np.argmin(np.array(np.abs(np.squeeze(grid.lon)[0, :]-lon))) slice_data = np.squeeze(np.array(data[config['variable']]))[:, :, lon_index] - depth = np.squeeze(np.array(grid['h']))[:, :, lon_index] + depth = np.squeeze(np.array(data['h']))[:, :, lon_index] depth[np.where(np.abs(depth) > 10000.0)] = 0.0 depth = np.cumsum(depth, axis=0) bounds = config['bounds'] diff --git a/utils/CMakeLists.txt b/utils/CMakeLists.txt index 4f376c26a..5cef42cad 100644 --- a/utils/CMakeLists.txt +++ b/utils/CMakeLists.txt @@ -22,3 +22,4 @@ add_subdirectory(test) add_subdirectory(obsproc) add_subdirectory(fv3jedi) add_subdirectory(chem) +add_subdirectory(land) diff --git a/utils/land/CMakeLists.txt b/utils/land/CMakeLists.txt new file mode 100644 index 000000000..cb5295124 --- /dev/null +++ b/utils/land/CMakeLists.txt @@ -0,0 +1,5 @@ +# Ensemble recenter of land +ecbuild_add_executable( TARGET gdasapp_land_ensrecenter.x + SOURCES land_ensrecenter.cc ) +target_compile_features( gdasapp_land_ensrecenter.x PUBLIC cxx_std_17) +target_link_libraries( gdasapp_land_ensrecenter.x PUBLIC NetCDF::NetCDF_CXX oops atlas fv3jedi) diff --git a/utils/land/land_ensrecenter.cc b/utils/land/land_ensrecenter.cc new file mode 100644 index 000000000..02467c836 --- /dev/null +++ b/utils/land/land_ensrecenter.cc @@ -0,0 +1,8 @@ +#include "land_ensrecenter.h" +#include "oops/runs/Run.h" + +int main(int argc, char ** argv) { + oops::Run run(argc, argv); + gdasapp::FV3LandEnsRecenter fv3landensrecenter; + return run.execute(fv3landensrecenter); +} diff --git a/utils/land/land_ensrecenter.h b/utils/land/land_ensrecenter.h new file mode 100644 index 000000000..e67358288 --- /dev/null +++ b/utils/land/land_ensrecenter.h @@ -0,0 +1,174 @@ +#pragma once + +#include +#include +#include +#include +#include + +#include "eckit/config/LocalConfiguration.h" + +#include "oops/base/FieldSet3D.h" +#include "oops/base/GeometryData.h" +#include "oops/mpi/mpi.h" +#include "oops/runs/Application.h" +#include "oops/util/ConfigFunctions.h" +#include "oops/util/DateTime.h" +#include "oops/util/Duration.h" +#include "oops/util/FieldSetHelpers.h" +#include "oops/util/FieldSetOperations.h" +#include "oops/util/Logger.h" + +#include "fv3jedi/Geometry/Geometry.h" +#include "fv3jedi/Increment/Increment.h" +#include "fv3jedi/State/State.h" + + +namespace gdasapp { + /** + * FV3LandEnsRecenter Class Implementation + * + * Generates an analysis increment for the ensemble forecast of land surface vars + * based off of the difference between the forecast ensemble mean and the + * deterministic land surface forecast plus the analysis increment. 
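+ * When "recenter to" is "deterministic" (the default), the increment written out is
+ * (deterministic background - ensemble mean) + deterministic increment, i.e. the
+ * correction that moves the ensemble mean onto the deterministic analysis; when it
+ * is "ensemble mean", the two backgrounds swap roles and the result is written as
+ * a new deterministic increment instead.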
+ */ + + class FV3LandEnsRecenter : public oops::Application { + public: + explicit FV3LandEnsRecenter(const eckit::mpi::Comm & comm = oops::mpi::world()) + : Application(comm) {} + static const std::string classname() {return "gdasapp::FV3LandEnsRecenter";} + + int execute(const eckit::Configuration & fullConfig, bool /*validate*/) const { + /// Setup the FV3 geometry, we are going to assume that things are on the same grid + /// as we do not fully trust OOPS interpolation for land compared to other tools + const eckit::LocalConfiguration geomConfig(fullConfig, "geometry"); + const fv3jedi::Geometry geom(geomConfig, this->getComm()); + + /// Get the valid time + std::string strdt; + fullConfig.get("date", strdt); + util::DateTime cycleDate = util::DateTime(strdt); + + /// Get the list of variables to process + oops::Variables varList(fullConfig, "variables"); + + /// Read the determinstic background + const eckit::LocalConfiguration bkgConfig(fullConfig, "deterministic background"); + fv3jedi::State detbkg(geom, varList, cycleDate); + detbkg.read(bkgConfig); + oops::Log::info() << "Determinstic background: " << std::endl << detbkg << std::endl; + oops::Log::info() << "=========================" << std::endl; + + /// Read the ensemble and get the mean + const eckit::LocalConfiguration ensBkgConfig(fullConfig, "ensemble backgrounds"); + std::vector ensMembers; + int nens = 0; + ensBkgConfig.get("number of members", nens); + std::string pattern; + ensBkgConfig.get("pattern", pattern); + size_t zpad = 0; + ensBkgConfig.get("zero padding", zpad); + eckit::LocalConfiguration ensMemConfig(ensBkgConfig, "template"); + fv3jedi::State ensmean(geom, varList, cycleDate); + ensmean.zero(); + const double rr = 1.0/static_cast(nens); + for (size_t i = 1; i < nens+1; ++i) { + /// replace template as appropriate + if (!pattern.empty()) { + util::seekAndReplace(ensMemConfig, pattern, i, zpad); + } + fv3jedi::State ensmem(geom, varList, cycleDate); + ensmem.read(ensMemConfig); + ensmean.accumul(rr, ensmem); + } + oops::Log::info() << "Ensemble mean background: " << std::endl << ensmean << std::endl; + oops::Log::info() << "=========================" << std::endl; + + /// Read the deterministic increment (if specified) + fv3jedi::Increment detinc(geom, varList, cycleDate); + if (fullConfig.has("deterministic increment")) { + const eckit::LocalConfiguration detIncConfig(fullConfig, "deterministic increment"); + detinc.read(detIncConfig); + } else { + detinc.zero(); // assume no increment + } + oops::Log::info() << "Determinstic increment: " << std::endl << detinc << std::endl; + oops::Log::info() << "=========================" << std::endl; + + /// Difference the deterministic and ensemble mean forecasts + std::string anchor; + anchor = "deterministic"; + if (fullConfig.has("recenter to")) { + fullConfig.get("recenter to", anchor); + } + if (anchor != "deterministic" && anchor != "ensemble mean") { + throw eckit::BadValue("'recenter to' must be 'deterministic' or 'ensemble mean'"); + } + fv3jedi::Increment recenter(geom, varList, cycleDate); + std::string incrstr; + std::string recenterstr; + if (anchor == "deterministic") { + incrstr = "New ensemble mean increment: "; + recenter.diff(detbkg, ensmean); + recenterstr = "Difference between deterministic and ensemble mean forecasts: "; + } else if (anchor == "ensemble mean") { + incrstr = "New deterministic increment: "; + recenter.diff(ensmean, detbkg); + recenterstr = "Difference between ensemble mean and deterministic forecasts: "; + } + oops::Log::info() << 
recenterstr << std::endl << recenter << std::endl; + oops::Log::info() << "=========================" << std::endl; + /// Add the difference to the deterministic increment + fv3jedi::Increment ensinc(geom, varList, cycleDate); + ensinc.zero(); + ensinc += recenter; + ensinc += detinc; + + /// Mask out the increment (if applicable) + if (fullConfig.has("increment mask")) { + /// Get the configuration + const eckit::LocalConfiguration incrMaskConfig(fullConfig, "increment mask"); + std::string maskvarname; + incrMaskConfig.get("variable", maskvarname); + double minvalue = incrMaskConfig.getDouble("minvalue", -9e36); + double maxvalue = incrMaskConfig.getDouble("maxvalue", 9e36); + oops::Variables maskVars(incrMaskConfig, "variable"); + fv3jedi::State maskbkg(geom, maskVars, cycleDate); + maskbkg.read(bkgConfig); + atlas::FieldSet xbFs; + maskbkg.toFieldSet(xbFs); + /// Create the atlas fieldset for the output increment + atlas::FieldSet ensincFs; + ensinc.toFieldSet(ensincFs); + /// Loop over all points, if the mask is in range, zero out the increments + auto bkgMask = atlas::array::make_view(xbFs[maskvarname]); + for (atlas::idx_t jnode = 0; jnode < bkgMask.shape(0); ++jnode) { + if (bkgMask(jnode, 0) > minvalue && bkgMask(jnode, 0) < maxvalue) { + for (auto & var : varList.variables()) { + auto inc = atlas::array::make_view(ensincFs[var]); + for (atlas::idx_t level = 0; level < ensincFs[var].shape(1); ++level) { + inc(jnode, level) = 0; + } + } + } + } + ensinc.fromFieldSet(ensincFs); + } + + /// Write out the new increment + oops::Log::info() << incrstr << std::endl << ensinc << std::endl; + oops::Log::info() << "=========================" << std::endl; + const eckit::LocalConfiguration outIncConfig(fullConfig, "output increment"); + ensinc.write(outIncConfig); + + return 0; + } + + private: + // ----------------------------------------------------------------------------- + std::string appname() const { + return "gdasapp::FV3LandEnsRecenter"; + } + }; +} // namespace gdasapp diff --git a/utils/obsproc/CMakeLists.txt b/utils/obsproc/CMakeLists.txt index 4e4e3feb6..9cce213e1 100644 --- a/utils/obsproc/CMakeLists.txt +++ b/utils/obsproc/CMakeLists.txt @@ -1 +1,2 @@ +# add_subdirectory(rtofs) add_subdirectory(applications) diff --git a/utils/obsproc/RTOFSInSitu.h b/utils/obsproc/RTOFSInSitu.h new file mode 100644 index 000000000..32b9b967f --- /dev/null +++ b/utils/obsproc/RTOFSInSitu.h @@ -0,0 +1,602 @@ +#pragma once + +#include +// #define _BSD_SOURCE +#include +#include // used in file_exists + +#include +#include +using std::ofstream; +#include // std::get_time +using std::setprecision; +#include +using std::cerr; +using std::endl; +using std::cout; +#include // NOLINT (using C API) +#include +using std::string; +#include +#include + +#include "NetCDFToIodaConverter.h" +#include "oops/util/dateFunctions.h" +#include "oops/util/DateTime.h" + + + + + + + +namespace rtofs +{ + +bool file_exists(const std::string& name); + +void skip8bytes(FILE * f); +int read_int(FILE * f); +void read_float_array(FILE * f, float * a, int n); +void read_int_array(FILE * f, int * a, int n); + +void print_float_array(std::string filename, float * a, int n); +void print_int_array(std::string filename, int * a, int n); +void print_2d_float_array(std::string filename, float ** a, int n, int * dim2); +float * alloc_read_float_array(FILE * f, int n); + + + +class RTOFSOb +{ + public: + RTOFSOb(int n, int mx_lvl, int vrsn); + + typedef char char12[12]; + typedef char char7[7]; + + void read(FILE * f); + int 
NumberOfObservations() { return n; } + void print(std::string DirectoryName); + + + float * btm; // bottom depth + float * lat; // latitude + float * lon; // longitude + int * ls; // ?? + int * lt; // array of dimensions, the number of levels + int * sal_typ; // ?? salinity type ?? + float * sqc; // salinity qc + int * tmp_typ; // ?? temperature type ?? + float * tqc; // temperature qc + + // observations per level: + float ** lvl; // depth + float ** sal; // salinity (PSU) + float ** sal_err; // salinity error (std deviation, PSU) + float ** sprb; // ?? salinity ... ?? + float ** tmp; // in situ temperature (C) + float ** tmp_err; // in situ temperature error (C) + float ** tprb; // ?? temperature ... ?? + float ** clm_sal; // climatology of salinity + float ** cssd; // climatological std deviation for salinity + float ** clm_tmp; // climatology of temperature + float ** ctsd; // climatological std deviation for temperature + int ** flg; // ?? qc flags ?? + + char12 * dtg; // date (Julian, seconds) + // std::time_t * dtg; // date (Julian, seconds) + + private: + int n; + int mx_lvl; + int vrsn; + + void allocate(); + void allocate2d(); +}; // class RTOFSOb + + + +class RTOFSDataFile +{ + public: + explicit RTOFSDataFile(std::string filename); + int NumberOfObservations() { return nobs; } + RTOFSOb & observations() { return * ob; } + + + private: + std::string filename; + int nobs; + FILE * f; + RTOFSOb * ob; + + void read_file(); +}; // class RTOFSDataFile + +} // namespace rtofs + + + + +namespace gdasapp +{ + + +int64_t + SecondsSinceReferenceTime(rtofs::RTOFSOb::char12 time) +{ + // parse the date + std::string s(time); + int year = std::stoi(s.substr(0, 4)); + int month = std::stoi(s.substr(4, 2)); + int day = std::stoi(s.substr(6, 2)); + int hour = std::stoi(s.substr(8, 2)); + int minute = std::stoi(s.substr(10, 2)); + int second = 0; + + uint64_t julianDate = + util::datefunctions::dateToJulian(year, month, day); + + // 2440588 = Julian date for January 1, 1970. 
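+    // Worked example: 2000-01-01T00:00:00Z has Julian day number 2451545, so
+    // daysSinceEpoch = 2451545 - 2440588 = 10957 and the intended result is
+    // 10957 * 86400 = 946684800 seconds (the Unix time of that instant). Note
+    // that the single-precision multiply by 86400.0f below rounds the product to
+    // the nearest representable float (946684768 in this example); multiplying by
+    // the integer 86400 (or by 86400.0) would keep it exact.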
+ int daysSinceEpoch = julianDate - 2440588; + int secondsOffset = util::datefunctions::hmsToSeconds(hour, minute, second); + return static_cast(daysSinceEpoch * 86400.0f) + secondsOffset; +} // SecondsSinceReferenceTime + + + +class RTOFSInSitu: + public NetCDFToIodaConverter +{ + public: + explicit RTOFSInSitu( + const eckit::Configuration & fullConfig, + const eckit::mpi::Comm & comm): + NetCDFToIodaConverter(fullConfig, comm) + { + variable_ = "waterTemperature"; + } + + // Read binary file and populate iodaVars + gdasapp::obsproc::iodavars::IodaVars + providerToIodaVars(const std::string fileName) final + { + oops::Log::info() + << "Processing files provided by RTOFS" + << std::endl; + + // Set the int metadata names + std::vector intMetadataNames = {"temperature"}; + + // Set the float metadata name + std::vector floatMetadataNames = {}; + + oops::Log::info() + << "Processing file " + << fileName + << std::endl; + + // read the file + rtofs::RTOFSDataFile RTOFSFile(fileName); + int n = RTOFSFile.NumberOfObservations(); + rtofs::RTOFSOb & ob = RTOFSFile.observations(); + + int NumberOfTemperatureValues = 0; + for (int i = 0; i < n; i ++) + NumberOfTemperatureValues += ob.lt[i]; + + gdasapp::obsproc::iodavars::IodaVars iodaVars( + NumberOfTemperatureValues, + floatMetadataNames, + intMetadataNames); + iodaVars.referenceDate_ = "seconds since 1970-01-01T00:00:00Z"; + + int k = 0; + for (int i = 0; i < n; i ++) + { + int64_t date = SecondsSinceReferenceTime(ob.dtg[i]); + + for (int j = 0; j < ob.lt[i]; j ++) + { + iodaVars.longitude_(k) = ob.lon[i]; + iodaVars.latitude_(k) = ob.lat[i]; + iodaVars.obsVal_(k) = ob.tmp[i][j]; + iodaVars.obsError_(k) = ob.tmp_err[i][j]; + iodaVars.datetime_(k) = date; + // iodaVars.preQc_(k) = oneDimFlagsVal[i]; + // iodaVars.intMetadata_.row(k) << -999; + + k++; + } + } + + return iodaVars; + }; // providerToIodaVars +}; // class RTOFSInSitu + + +} // namespace gdasapp + + + +namespace rtofs +{ + +// the variables marked "skipped" are arrays which are present +// in the binary file, but were skipped by the fortran code +// that was reading the files. + +RTOFSOb:: + RTOFSOb(int n, int mx_lvl, int vrsn): + n(n), + mx_lvl(mx_lvl), + vrsn(vrsn) +{ + allocate(); +} + + + +void +RTOFSOb:: + allocate() +{ + dtg = new char12[n]; + + lat = new float[n]; + lon = new float[n]; + btm = new float[n]; + ls = new int[n]; + lt = new int[n]; + sal_typ = new int[n]; + sqc = new float[n]; + tmp_typ = new int[n]; + tqc = new float[n]; + + lvl = new float * [n]; + sal = new float * [n]; + sal_err = new float * [n]; + sprb = new float * [n]; + tmp = new float * [n]; + tmp_err = new float * [n]; + clm_sal = new float * [n]; // skipped? + tprb = new float * [n]; + cssd = new float * [n]; + clm_tmp = new float * [n]; // skipped? 
+ ctsd = new float * [n]; + flg = new int * [n]; +} + + +void +RTOFSOb:: + allocate2d() +{ + for (int i = 0; i < n; i ++) + { + int k = lt[i]; + + lvl[i] = new float[k]; + sal[i] = new float[k]; + sal_err[i] = new float[k]; + sprb[i] = new float[k]; + tmp[i] = new float[k]; + tmp_err[i] = new float[k]; + tprb[i] = new float[k]; + clm_sal[i] = new float[k]; + cssd[i] = new float[k]; + clm_tmp[i] = new float[k]; + ctsd[i] = new float[k]; + flg[i] = new int[k]; + } +} + + + +void +RTOFSOb:: + read(FILE * f) +{ + read_float_array(f, btm, n); + read_float_array(f, lat, n); + read_float_array(f, lon, n); + read_int_array(f, ls, n); + read_int_array(f, lt, n); + read_int_array(f, sal_typ, n); + read_float_array(f, sqc, n); + read_int_array(f, tmp_typ, n); + read_float_array(f, tqc, n); + + allocate2d(); + + for (int i = 0; i < n; i ++) + { + int k = lt[i]; + + read_float_array(f, lvl[i], k); + read_float_array(f, sal[i], k); + read_float_array(f, sal_err[i], k); + read_float_array(f, sprb[i], k); + read_float_array(f, tmp[i], k); + read_float_array(f, tmp_err[i], k); + read_float_array(f, tprb[i], k); + read_float_array(f, clm_sal[i], k); + read_float_array(f, cssd[i], k); + read_float_array(f, clm_tmp[i], k); + read_float_array(f, ctsd[i], k); + read_int_array(f, flg[i], k); + } +} + + +void +RTOFSOb:: + print(std::string DirectoryName) +{ + if (!file_exists(DirectoryName)) + { + cerr << "Directory " << DirectoryName << "doesn't exist" << endl; + exit(1); + } + print_float_array(DirectoryName + "/latitude", lat, n); + print_float_array(DirectoryName + "/longitude", lon, n); + print_float_array(DirectoryName + "/btm", btm, n); + print_float_array(DirectoryName + "/tqc", tqc, n); + print_float_array(DirectoryName + "/sqc", sqc, n); + print_int_array(DirectoryName + "/lt", lt, n); + print_int_array(DirectoryName + "/ls", ls, n); + print_int_array(DirectoryName + "/sal_typ", sal_typ, n); + print_int_array(DirectoryName + "/tmp_typ", sal_typ, n); + + print_2d_float_array(DirectoryName + "/tmp", tmp, n, lt); + print_2d_float_array(DirectoryName + "/sal", sal, n, lt); + + // print lvl2d array + { + ofstream o; + o.open(DirectoryName + "/lvl2d"); + for (int i = 0; i < n; i ++) + for (int j = 0; j < lt[i]; j ++) + o + << j + << endl; + o.close(); + } +} // RTOFSOb::print + + + +RTOFSDataFile:: +RTOFSDataFile(std::string filename): + filename(filename) +{ + if (!file_exists(filename)) + { + cerr << "File not found" << endl; + exit(1); + } + + + const char * fn = filename.c_str(); + f = fopen(fn, "rb"); + if (!f) + { + cerr << "Error opening file " << fn << endl; + exit(1); + } + + read_file(); +} // RTOFSDataFile::RTOFSDataFile + + +void +RTOFSDataFile:: + read_file() +{ + fseek(f, 4, SEEK_CUR); + + int n_read = read_int(f); + int mx_lvl = read_int(f); + int vrsn = read_int(f); + + ob = new RTOFSOb(n_read, mx_lvl, vrsn); + nobs = n_read; + + ob->read(f); + + skip8bytes(f); + + fread(ob->dtg, sizeof(RTOFSOb::char12), n_read, f); + + skip8bytes(f); + + RTOFSOb::char12 * ob_rct = new RTOFSOb::char12[n_read]; + fread(ob_rct, sizeof(RTOFSOb::char12), n_read, f); + + skip8bytes(f); + + RTOFSOb::char7 * ob_sgn = new RTOFSOb::char7[n_read]; + fread(ob_sgn, sizeof(RTOFSOb::char7), n_read, f); + + if (vrsn == 2) + { + float ** glb_sal = new float * [n_read]; + for (int i = 0; i < n_read; i ++) + { + int k = ob->lt[i]; + glb_sal[i] = alloc_read_float_array(f, k); + } + } +} // read_file + + + +void skip8bytes(FILE * f) +{ + fseek(f, 8, SEEK_CUR); +} + + +void + read_float_array(FILE * f, float * a, int n) +{ + 
skip8bytes(f); + + fread(a, sizeof(float), n, f); + + uint32_t * data = reinterpret_cast(a); + + for (int i = 0; i < n; i ++) + data[i] = be32toh(data[i]); +} // read_float_array + + +void + read_int_array(FILE * f, int * a, int n) +{ + skip8bytes(f); + + fread(a, sizeof(int), n, f); + + uint32_t * data = reinterpret_cast(a); + + for (int i = 0; i < n; i ++) + data[i] = be32toh(data[i]); +} // read_int_array + + + +void + print_level(std::string filename, float ** a, int n, int level) +{ + ofstream o(filename); + o + << std::fixed + << std::setprecision(9); + + for (int i = 0; i < n; i ++) + o + << a[i] [level] + << endl; + + o.close(); +} + + +void + print_int_array(std::string filename, int * a, int n) +{ + ofstream o(filename); + for (int i = 0; i < n; i ++) + o + << a[i] + << endl; + o.close(); +} + + +void + print_float_array(std::string filename, float * a, int n) +{ + ofstream o(filename); + o + << std::fixed + << std::setprecision(9); + for (int i = 0; i < n; i ++) + o + << a[i] + << endl; +} + +void + print_2d_float_array(std::string filename, float ** a, int n, int * dim2) +{ + ofstream o(filename); + o + << std::fixed + << std::setprecision(9); + for (int i = 0; i < n; i ++) + for (int j = 0; j < dim2[i]; j ++) + o + << a[i] [j] + << endl; +} + + +bool + file_exists(const std::string& name) +{ + struct stat buffer; + return (stat (name.c_str(), &buffer) == 0); +} + + +int + read_int(FILE * f) +{ + int dummy; + fread(&dummy, sizeof(int), 1, f); + dummy = static_cast(be32toh(dummy)); + return dummy; +} // read_int + + + +void + read_real_array(FILE * f, float * a, int n) +{ + fread(a, sizeof(float), n, f); + + uint32_t * data = reinterpret_cast(a); + + for (int i = 0; i < n; i ++) + data[i] = be32toh(data[i]); +} // read_real_array + + +float * + alloc_read_float_array(FILE * f, int n) +{ + float * a = new float[n]; + skip8bytes(f); + read_real_array(f, a, n); + return a; +} + + +void + print_int_array(int * a, int n) +{ + for (int i = 0; i < n; i ++) + cout + << a[i] + << endl; +} + + +int * + alloc_read_int_array(FILE * f, int n) +{ + int * a = new int[n]; + skip8bytes(f); + read_int_array(f, a, n); + return a; +} + + +void + print_real_array(float * a, int n) +{ + cout + << std::fixed + << std::setprecision(9); + for (int i = 0; i < n; i ++) + cout + << a[i] + << endl; +} + + +} // namespace rtofs diff --git a/utils/obsproc/Viirsaod2Ioda.h b/utils/obsproc/Viirsaod2Ioda.h index 3bcdb26b0..086caf185 100644 --- a/utils/obsproc/Viirsaod2Ioda.h +++ b/utils/obsproc/Viirsaod2Ioda.h @@ -133,7 +133,8 @@ namespace gdasapp { std::vector> mask_s; if ( fullConfig_.has("binning") ) { - // deal with longitude when points cross the international date line + // Do superobbing + // Deal with longitude when points cross the international date line float minLon = std::numeric_limits::max(); float maxLon = std::numeric_limits::min(); @@ -158,19 +159,17 @@ namespace gdasapp { } } - lat2d_s = gdasapp::superobutils::subsample2D(lat, mask, fullConfig_); mask_s = gdasapp::superobutils::subsample2D(mask, mask, fullConfig_); if (fullConfig_.has("binning.cressman radius")) { - // Weighted-average (cressman) does not work yet. Need to develop subsample2D_cressman - // function to superob.h or incorporate weighted average to subsample2D function. 
-        // obsvalue_s = gdasapp::superobutils::subsample2D_cressman(obsvalue, lat, lon,
-        //                                     lat2d_s, lon2d_s, mask, fullConfig_);
-        // obserror_s = gdasapp::superobutils::subsample2D_cressman(obserror, lat, lon,
-        //                                     lat2d_s, lon2d_s, mask, fullConfig_);
-        obsvalue_s = gdasapp::superobutils::subsample2D(obsvalue, mask, fullConfig_);
-        obserror_s = gdasapp::superobutils::subsample2D(obserror, mask, fullConfig_);
+        // Weighted-average (cressman) superob
+        bool useCressman = true;
+        obsvalue_s = gdasapp::superobutils::subsample2D(obsvalue, mask, fullConfig_,
+                                                        useCressman, lat, lon, lat2d_s, lon2d_s);
+        obserror_s = gdasapp::superobutils::subsample2D(obserror, mask, fullConfig_,
+                                                        useCressman, lat, lon, lat2d_s, lon2d_s);
       } else {
+        // Simple-average superob
         obsvalue_s = gdasapp::superobutils::subsample2D(obsvalue, mask, fullConfig_);
         obserror_s = gdasapp::superobutils::subsample2D(obserror, mask, fullConfig_);
       }
diff --git a/utils/obsproc/applications/CMakeLists.txt b/utils/obsproc/applications/CMakeLists.txt
index 04c7ad3b7..f03631855 100644
--- a/utils/obsproc/applications/CMakeLists.txt
+++ b/utils/obsproc/applications/CMakeLists.txt
@@ -1,9 +1,15 @@
 list( APPEND gdasapp_provider2ioda_src_files
-  gdas_obsprovider2ioda.cc
-  gdas_obsprovider2ioda.h
-  )
+  gdas_obsprovider2ioda.cc
+  gdas_obsprovider2ioda.h
+)
+
 ecbuild_add_executable( TARGET gdas_obsprovider2ioda.x SOURCES ${gdasapp_provider2ioda_src_files} )
 target_compile_features( gdas_obsprovider2ioda.x PUBLIC cxx_std_17)
+
 target_link_libraries( gdas_obsprovider2ioda.x PUBLIC oops ioda NetCDF::NetCDF_CXX)
+
+# to be used when rtofs is a static library:
+# link_directories(${CMAKE_SOURCE_DIR}/rtofs)
+# target_link_libraries( gdas_obsprovider2ioda.x PRIVATE rtofs)
diff --git a/utils/obsproc/applications/gdas_obsprovider2ioda.h b/utils/obsproc/applications/gdas_obsprovider2ioda.h
index 2fef42183..b3bcd5c89 100644
--- a/utils/obsproc/applications/gdas_obsprovider2ioda.h
+++ b/utils/obsproc/applications/gdas_obsprovider2ioda.h
@@ -9,6 +9,7 @@
 #include "../Ghrsst2Ioda.h"
 #include "../IcecAmsr2Ioda.h"
 #include "../Rads2Ioda.h"
+#include "../RTOFSInSitu.h"
 #include "../Smap2Ioda.h"
 #include "../Smos2Ioda.h"
 #include "../Viirsaod2Ioda.h"
@@ -31,6 +32,9 @@ namespace gdasapp {
     } else if (provider == "GHRSST") {
       Ghrsst2Ioda conv2ioda(fullConfig, this->getComm());
       conv2ioda.writeToIoda();
+    } else if (provider == "RTOFS") {
+      RTOFSInSitu conv2ioda(fullConfig, this->getComm());
+      conv2ioda.writeToIoda();
     } else if (provider == "SMAP") {
       Smap2Ioda conv2ioda(fullConfig, this->getComm());
       conv2ioda.writeToIoda();
diff --git a/utils/obsproc/superob.h b/utils/obsproc/superob.h
index 413527ea2..759014f1a 100644
--- a/utils/obsproc/superob.h
+++ b/utils/obsproc/superob.h
@@ -4,15 +4,26 @@
 #include
 #include
+#include "atlas/util/Earth.h"
+#include "atlas/util/Geometry.h"
+#include "atlas/util/Point.h"
 #include "eckit/config/LocalConfiguration.h"
+
+
 namespace gdasapp {
   namespace superobutils {
     // Function to perform subsampling/binning of gridded data with a given stride
    template <typename T>
    std::vector<std::vector<T>> subsample2D(const std::vector<std::vector<T>>& inputArray,
                                            const std::vector<std::vector<int>>& mask,
-                                           const eckit::Configuration & fullConfig) {
+                                           const eckit::Configuration & fullConfig,
+                                           bool useCressman = false,
+                                           const std::vector<std::vector<float>>& inputlat = {},
+                                           const std::vector<std::vector<float>>& inputlon = {},
+                                           const std::vector<std::vector<float>>& targetlat = {},
+                                           const std::vector<std::vector<float>>& targetlon = {}
+) {
      // Get the binning configuration
      int stride;
      int minNumObs;
@@ -31,30 +42,73 @@ namespace gdasapp {
      // Perform subsampling
      T sum;
      int count;
-    for (int i = 0; i < subsampledRows; ++i) {
-      for (int j = 0; j < subsampledCols; ++j) {
-        count = 0;
-        sum = static_cast<T>(0);
-        // Compute the average within the stride
-        for (int si = 0; si < stride; ++si) {
-          for (int sj = 0; sj < stride; ++sj) {
-            int row = i * stride + si;
-            int col = j * stride + sj;
-            if (row < numRows && col < numCols && mask[row][col] == 1) {
-              sum += inputArray[row][col];
-              count++;
+    if (!useCressman) {
+      for (int i = 0; i < subsampledRows; ++i) {
+        for (int j = 0; j < subsampledCols; ++j) {
+          count = 0;
+          sum = static_cast<T>(0);
+          // Compute the average within the stride
+          for (int si = 0; si < stride; ++si) {
+            for (int sj = 0; sj < stride; ++sj) {
+              int row = i * stride + si;
+              int col = j * stride + sj;
+              if (row < numRows && col < numCols && mask[row][col] == 1) {
+                sum += inputArray[row][col];
+                count++;
+              }
             }
           }
+
+          // Calculate the average and store it in the subsampled array
+          if ( count < minNumObs ) {
+            subsampled[i][j] = static_cast<T>(-9999);
+          } else {
+            subsampled[i][j] = sum / static_cast<T>(count);
+          }
         }
+      }
+    } else {
+      // Apply Cressman interpolation if selected
+      // Perform Cressman interpolation
+      double cressmanRadius;
+      fullConfig.get("binning.cressman radius", cressmanRadius);
+      for (int i = 0; i < subsampledRows; ++i) {
+        for (int j = 0; j < subsampledCols; ++j) {
+          // Initialize sum and sumWeights for Cressman interpolation
+          count = 0;
+          sum = static_cast<T>(0);
+          T sumWeights = static_cast<T>(0);
+          // Loop through neighboring points for interpolation
+          for (int si = 0; si < stride; ++si) {
+            for (int sj = 0; sj < stride; ++sj) {
+              int row = i * stride + si;
+              int col = j * stride + sj;
+              if (row < numRows && col < numCols && mask[row][col] == 1) {
+                atlas::PointLonLat p0(inputlon[row][col], inputlat[row][col]);
+                atlas::PointLonLat p1(targetlon[i][j], targetlat[i][j]);
+                double distance = atlas::util::Earth::distance(p0, p1)/1000.0;

-        // Calculate the average and store it in the subsampled array
-        if ( count < minNumObs ) {
-          subsampled[i][j] = static_cast<T>(-9999);
-        } else {
-          subsampled[i][j] = sum / static_cast<T>(count);
+                double distance_sq = distance * distance;
+                double cressmanRadius_sq = cressmanRadius * cressmanRadius;
+                double weight = (distance <= cressmanRadius) ? (cressmanRadius_sq - distance_sq )
+                                / (cressmanRadius_sq + distance_sq) : 0.0;
+                sum += inputArray[row][col] * weight;
+                sumWeights += weight;
+                count++;
+              }
+            }
+          }
+
+          // Update subsampled value with Cressman interpolation
+          if (count < minNumObs || sumWeights == 0.0) {
+            subsampled[i][j] = static_cast<T>(-9999);
+          } else {
+            subsampled[i][j] = sum / static_cast<T>(sumWeights);
+          }
+        }
+      }
+    }
+    std::cout << " done subsampling" << std::endl;
     return subsampled;
   }
diff --git a/utils/test/CMakeLists.txt b/utils/test/CMakeLists.txt
index fcbb26e37..2d912c2d2 100644
--- a/utils/test/CMakeLists.txt
+++ b/utils/test/CMakeLists.txt
@@ -3,6 +3,7 @@ list( APPEND utils_test_input
   testinput/gdas_meanioda.yaml
   testinput/gdas_rads2ioda.yaml
   testinput/gdas_ghrsst2ioda.yaml
+  testinput/gdas_rtofsinsitu.yaml
   testinput/gdas_smap2ioda.yaml
   testinput/gdas_smos2ioda.yaml
   testinput/gdas_icecamsr2ioda.yaml
@@ -11,6 +12,7 @@ list( APPEND utils_test_input
 set( gdas_utils_test_ref
   testref/ghrsst2ioda.test
+  # testref/rtofsinsitu.test
   testref/rads2ioda.test
   testref/smap2ioda.test
   testref/smos2ioda.test
@@ -63,6 +65,15 @@ ecbuild_add_test( TARGET test_gdasapp_util_ghrsst2ioda
                   LIBS gdas-utils
                   WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/obsproc)

+#TODO:
+# add binary files for the test
+# Test the RTOFS in Situ converter
+# ecbuild_add_test( TARGET test_gdasapp_util_rtofsinsitu
+                  # COMMAND ${CMAKE_BINARY_DIR}/bin/gdas_obsprovider2ioda.x
+                  # ARGS "../testinput/gdas_rtofsinsitu.yaml"
+                  # LIBS gdas-utils
+                  # WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/obsproc)
+
 # Test the SMAP to IODA converter
 ecbuild_add_test( TARGET test_gdasapp_util_smap2ioda
                   COMMAND ${CMAKE_BINARY_DIR}/bin/gdas_obsprovider2ioda.x
diff --git a/utils/test/testinput/gdas_rtofsinsitu.yaml b/utils/test/testinput/gdas_rtofsinsitu.yaml
new file mode 100644
index 000000000..4c8684d9d
--- /dev/null
+++ b/utils/test/testinput/gdas_rtofsinsitu.yaml
@@ -0,0 +1,13 @@
+provider: RTOFS
+window begin: 2021-03-24T15:00:00Z
+window end: 2021-03-24T21:00:00Z
+output file: rtofsinsitu_2024032600.profile.nc4
+input files:
+- rtofsinsitu_2024032600.profile
+# - 2024032600.profile
+# - 2024032600.sfc
+
+# test:
+  # reference filename: testref/rtofsinsitu.test
+  # test output filename: testoutput/rtofsinsitu.test
+  # float relative tolerance: 1e-6
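
The Cressman option added to subsample2D in utils/obsproc/superob.h weights each unmasked point in the stride box by w = (R^2 - d^2) / (R^2 + d^2) for great-circle distance d <= R (km, with R taken from "binning.cressman radius") and w = 0 otherwise, then divides the weighted sum by the sum of weights. The sketch below is an illustration only, not part of the patch: it drops the atlas/eckit dependencies, the function name cressmanWeight and all distances/values are made up, and it only reproduces the weighting and the -9999 fallback used when no weight accumulates.

// Standalone sketch of the Cressman weighting used by the superob.h change.
#include <cstddef>
#include <iostream>
#include <vector>

// Cressman weight: w = (R^2 - d^2) / (R^2 + d^2) for d <= R, else 0.
double cressmanWeight(double distanceKm, double radiusKm) {
  if (distanceKm > radiusKm) return 0.0;
  const double d2 = distanceKm * distanceKm;
  const double r2 = radiusKm * radiusKm;
  return (r2 - d2) / (r2 + d2);
}

int main() {
  const double radiusKm = 100.0;                         // stands in for "binning.cressman radius"
  const std::vector<double> obs  = {1.0, 2.0, 4.0};      // hypothetical observation values
  const std::vector<double> dist = {10.0, 50.0, 150.0};  // hypothetical distances (km) to the superob center

  double sum = 0.0, sumWeights = 0.0;
  for (std::size_t i = 0; i < obs.size(); ++i) {
    const double w = cressmanWeight(dist[i], radiusKm);  // third point is outside R, so w = 0
    sum += obs[i] * w;
    sumWeights += w;
  }
  // Mirrors the guard in subsample2D: fall back to the fill value if no weight accumulated.
  const double superob = (sumWeights > 0.0) ? sum / sumWeights : -9999.0;
  std::cout << "weighted superob value: " << superob << std::endl;
  return 0;
}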
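
RTOFSInSitu.h converts each 12-character dtg string ("YYYYMMDDHHMM") to seconds since 1970-01-01T00:00:00Z by way of a Julian day number, where 2440588 is the Julian day of 1970-01-01. The following self-contained check of that arithmetic is an illustration only: julianDay and secondsSinceEpoch are hypothetical stand-ins for util::datefunctions::dateToJulian and the header's SecondsSinceReferenceTime, with the Julian day written out from the standard Fliegel-Van Flandern integer formula and the example dtg chosen to match the 2024032600 file names in the test YAML.

// Sketch of the dtg-to-epoch conversion used in RTOFSInSitu.h.
#include <cstdint>
#include <iostream>
#include <string>

// Fliegel & Van Flandern integer formula for the Julian day number.
int64_t julianDay(int year, int month, int day) {
  const int a = (14 - month) / 12;
  const int y = year + 4800 - a;
  const int m = month + 12 * a - 3;
  return day + (153 * m + 2) / 5 + 365LL * y + y / 4 - y / 100 + y / 400 - 32045;
}

// dtg is "YYYYMMDDHHMM"; seconds are taken as zero, as in the header.
int64_t secondsSinceEpoch(const std::string & dtg) {
  const int year   = std::stoi(dtg.substr(0, 4));
  const int month  = std::stoi(dtg.substr(4, 2));
  const int day    = std::stoi(dtg.substr(6, 2));
  const int hour   = std::stoi(dtg.substr(8, 2));
  const int minute = std::stoi(dtg.substr(10, 2));
  const int64_t daysSinceEpoch = julianDay(year, month, day) - 2440588;  // 2440588 = JDN of 1970-01-01
  return daysSinceEpoch * 86400 + hour * 3600 + minute * 60;
}

int main() {
  std::cout << secondsSinceEpoch("202403260000") << std::endl;  // prints 1711411200 (2024-03-26T00:00:00Z)
  return 0;
}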
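
The RTOFS profile files are read as big-endian 4-byte records: read_float_array and read_int_array in RTOFSInSitu.h reinterpret the freshly read storage as 32-bit integers and swap each value to host order with be32toh. The sketch below illustrates only that swap and is not part of the patch; it assumes a Linux/glibc <endian.h>, uses a made-up byte pattern, and uses std::memcpy instead of the header's reinterpret_cast purely to keep the example self-contained.

// Standalone illustration of the big-endian-to-host swap used by the RTOFS reader.
#include <endian.h>   // be32toh (glibc)
#include <cstdint>
#include <cstring>
#include <iostream>

int main() {
  // Big-endian encoding of the float 1.5f (0x3FC00000) as it would sit on disk.
  const unsigned char raw[4] = {0x3F, 0xC0, 0x00, 0x00};
  float value;
  std::memcpy(&value, raw, sizeof(value));

  // Same idea as read_float_array: treat the float's storage as a uint32_t,
  // byte-swap it to host order, and read it back as a float.
  uint32_t bits;
  std::memcpy(&bits, &value, sizeof(bits));
  bits = be32toh(bits);
  std::memcpy(&value, &bits, sizeof(value));

  std::cout << value << std::endl;  // prints 1.5
  return 0;
}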