From 96ba7a8307319a28e156676ff0b787c50a14cd05 Mon Sep 17 00:00:00 2001 From: Trygve Aspelien Date: Mon, 18 Sep 2023 12:14:29 +0200 Subject: [PATCH] Add cryoclim/sentinel as an observation set --- pysurfex/cli.py | 27 +-- pysurfex/cmd_parsing.py | 9 + pysurfex/obs.py | 210 --------------------- pysurfex/pseudoobs.py | 345 +++++++++++++++++++++++++++++++++++ tests/conftest.py | 71 +++++++ tests/smoke/test_cli_misc.py | 73 +------- tests/unit/test_cryoclim.py | 41 +++++ 7 files changed, 483 insertions(+), 293 deletions(-) create mode 100644 pysurfex/pseudoobs.py create mode 100644 tests/unit/test_cryoclim.py diff --git a/pysurfex/cli.py b/pysurfex/cli.py index a224d8b..9ed10dd 100644 --- a/pysurfex/cli.py +++ b/pysurfex/cli.py @@ -69,9 +69,10 @@ read_sentinel_nc, write_analysis_netcdf_file, ) -from .obs import Observation, sm_obs_sentinel, snow_pseudo_obs_cryoclim +from .obs import Observation from .obsmon import write_obsmon_sqlite_file from .platform_deps import SystemFilePathsFromFile +from .pseudoobs import CryoclimObservationSet, SentinelObservationSet from .read import ConvertedInput, Converter from .run import BatchJob, Masterodb, PerturbedOffline, SURFEXBinary from .titan import ( @@ -1257,12 +1258,9 @@ def sentinel_obs(argv=None): varname = kwargs["varname"] indent = kwargs["indent"] - grid_lons, grid_lats, grid_sm_class = read_sentinel_nc(infiles) fg_geo, validtime, grid_sm_fg, __, __ = read_first_guess_netcdf_file(fg_file, varname) - q_c = sm_obs_sentinel( - validtime, grid_sm_class, grid_lons, grid_lats, step, fg_geo, grid_sm_fg - ) - q_c.write_output(output, indent=indent) + obs_set = SentinelObservationSet(infiles, validtime, fg_geo, grid_sm_fg, step=step) + obs_set.write_json_file(output, indent=indent) def qc2obsmon(argv=None): @@ -1612,7 +1610,7 @@ def cryoclim_pseudoobs(argv=None): ) else: logging.basicConfig( - format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO + format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO ) 
logging.info("************ cryoclim_pseudoobs ******************") @@ -1622,15 +1620,22 @@ def cryoclim_pseudoobs(argv=None): output = kwargs["output"] varname = kwargs["varname"] indent = kwargs["indent"] + laf_threshold = kwargs["laf_threshold"] - grid_lons, grid_lats, grid_snow_class = read_cryoclim_nc(infiles) fg_geo, validtime, grid_snow_fg, glafs, gelevs = read_first_guess_netcdf_file( fg_file, varname ) - q_c = snow_pseudo_obs_cryoclim( - validtime, grid_snow_class, grid_lons, grid_lats, step, fg_geo, grid_snow_fg, gelevs + obs_set = CryoclimObservationSet( + infiles, + validtime, + fg_geo, + grid_snow_fg, + gelevs, + step=step, + glaf=glafs, + laf_threshold=laf_threshold, ) - q_c.write_output(output, indent=indent) + obs_set.write_json_file(output, indent=indent) def create_namelist(argv=None): diff --git a/pysurfex/cmd_parsing.py b/pysurfex/cmd_parsing.py index 48d24d4..db8d5df 100644 --- a/pysurfex/cmd_parsing.py +++ b/pysurfex/cmd_parsing.py @@ -2024,6 +2024,15 @@ def parse_cryoclim_pseudoobs(argv): default=None, required=False, ) + parser.add_argument( + "-gt", + "--laf_threshold", + dest="laf_threshold", + type=float, + help="LandAreaFraction threshold", + default=0.1, + required=False, + ) if len(argv) == 0: parser.print_help() diff --git a/pysurfex/obs.py b/pysurfex/obs.py index aab5f2d..021bcd9 100644 --- a/pysurfex/obs.py +++ b/pysurfex/obs.py @@ -811,213 +811,3 @@ def __init__(self, an_time, filename, label=""): ) ObservationSet.__init__(self, observations, label=label) - - -def snow_pseudo_obs_cryoclim( - validtime, - grid_snow_class, - grid_lons, - grid_lats, - step, - fg_geo, - grid_snow_fg, - gelevs_fg, - fg_threshold=.4, - new_snow_depth=0.1, -): - """Cryoclim snow. 
- - Args: - validtime (_type_): _description_ - grid_snow_class (_type_): _description_ - grid_lons (_type_): _description_ - grid_lats (_type_): _description_ - step (_type_): _description_ - fg_geo (_type_): _description_ - grid_snow_fg (_type_): _description_ - fg_threshold (float, optional): _description_. Defaults to 2.0. - new_snow_depth (float, optional): _description_. Defaults to 0.01. - - Returns: - _type_: _description_ - """ - n_x = grid_lons.shape[0] - n_y = grid_lons.shape[1] - - n_x = int(n_x / step) - n_y = int(n_y / step) - - # TODO rewrite to use lonlatvals geo - counter = 0 - iii = 0 - res_lons = [] - res_lats = [] - p_snow_class = {} - for __ in range(0, n_x): - jjj = 0 - for __ in range(0, n_y): - - if ( ( (grid_snow_class[0,iii,jjj] == 2) or (grid_snow_class[0,iii,jjj] == 1) ) and ((grid_lats[iii,jjj] < 90) and (grid_lats[iii,jjj] > 0.0)) and ((grid_lons[iii,jjj] < 180) and (grid_lons[iii,jjj] > -180.0)) ): - res_lons.append(grid_lons[iii, jjj]) - res_lats.append(grid_lats[iii, jjj]) - p_snow_class.update({str(counter): grid_snow_class[0, iii, jjj]}) - print(grid_snow_class[0, iii, jjj]) - counter = counter + 1 - jjj = jjj + step - iii = iii + step - - p_fg_snow_depth = gridpos2points( - fg_geo.lons, fg_geo.lats, np.asarray(res_lons), np.asarray(res_lats), grid_snow_fg) - p_fg_elevs = gridpos2points( - fg_geo.lons, fg_geo.lats, np.asarray(res_lons), np.asarray(res_lats), gelevs_fg) - in_grid = inside_grid( - fg_geo.lons, - fg_geo.lats, - np.asarray(res_lons), - np.asarray(res_lats), - distance=2500.0, - ) - - # Ordering of points must be the same..... 
- obs = [] - flags = [] - cis = [] - lafs = [] - providers = [] - for i in range(0, p_fg_snow_depth.shape[0]): - - p_snow_fg = p_fg_snow_depth[i] - logging.debug("%s %s %s %s", i, p_snow_fg, res_lons[i], res_lats[i]) - if not np.isnan(p_snow_fg): - # Check if in grid - if in_grid[i]: - obs_value = np.nan - try: - snow_class_val = p_snow_class[str(i)] - except KeyError: - continue - if snow_class_val == 2: - if p_snow_fg > 0: - if fg_threshold is not None: - if p_snow_fg <= fg_threshold: - obs_value = p_snow_fg - else: - obs_value = p_snow_fg - else: - obs_value = new_snow_depth - elif snow_class_val == 1: - if p_snow_fg >= 0.0: - obs_value = 0.0 - - if not np.isnan(obs_value): - flags.append(0) - cis.append(0) - lafs.append(0) - providers.append(0) - obs.append( - Observation(validtime, res_lons[i], res_lats[i], obs_value,p_fg_elevs[i], varname="totalSnowDepth") - ) - - logging.info("Possible pseudo-observations: %s", n_x * n_y) - logging.info("Pseudo-observations created: %s", len(obs)) - return QCDataSet(validtime, obs, flags, cis, lafs, providers) - - -def sm_obs_sentinel( - validtime, - grid_sm_class, - grid_lons, - grid_lats, - step, - fg_geo, - grid_sm_fg, - fg_threshold=1.0, -): - """Sentinel. - - Args: - validtime (_type_): _description_ - grid_sm_class (_type_): _description_ - grid_lons (_type_): _description_ - grid_lats (_type_): _description_ - step (_type_): _description_ - fg_geo (_type_): _description_ - grid_sm_fg (_type_): _description_ - fg_threshold (_type_, optional): _description_. Defaults to 1.. 
- - Returns: - _type_: _description_ - - """ - n_x = grid_lons.shape[0] - n_y = grid_lons.shape[1] - - n_x = int(n_x / step) - n_y = int(n_y / step) - - # TODO rewrite to use lonlatvals geo - counter = 0 - iii = 0 - res_lons = [] - res_lats = [] - p_sm_class = {} - for __ in range(0, n_x): - jjj = 0 - for __ in range(0, n_y): - res_lons.append(grid_lons[iii, jjj]) - res_lats.append(grid_lats[iii, jjj]) - p_sm_class.update({str(counter): grid_sm_class[iii, jjj]}) - counter = counter + 1 - jjj = jjj + step - iii = iii + step - - p_fg_sm = gridpos2points( - fg_geo.lons, fg_geo.lats, np.asarray(res_lons), np.asarray(res_lats), grid_sm_fg - ) - in_grid = inside_grid( - fg_geo.lons, - fg_geo.lats, - np.asarray(res_lons), - np.asarray(res_lats), - distance=2500.0, - ) - # Ordering of points must be the same..... - obs = [] - flags = [] - cis = [] - lafs = [] - providers = [] - for i in range(0, p_fg_sm.shape[0]): - - p_sm_fg = p_fg_sm[i] - if not np.isnan(p_sm_fg): - # Check if in grid - if in_grid[i]: - obs_value = np.nan - if (p_sm_class[str(i)] > 1) or (p_sm_class[str(i)] < 0): - if p_sm_fg <= fg_threshold: - obs_value = p_sm_fg - else: - obs_value = 999 - - else: - obs_value = p_sm_class[str(i)] - - if not np.isnan(obs_value): - flags.append(0) - cis.append(0) - lafs.append(0) - providers.append(0) - obs.append( - Observation( - validtime, - res_lons[i], - res_lats[i], - obs_value, - varname="surface_soil_moisture", - ) - ) - - logging.info("Possible pseudo-observations: %s", n_x * n_y) - logging.info("Pseudo-observations created: %s", len(obs)) - return QCDataSet(validtime, obs, flags, cis, lafs, providers) diff --git a/pysurfex/pseudoobs.py b/pysurfex/pseudoobs.py new file mode 100644 index 0000000..09724c6 --- /dev/null +++ b/pysurfex/pseudoobs.py @@ -0,0 +1,345 @@ +"""Pseuodo-obs""" + + +import logging + +import numpy as np + +from .interpolation import gridpos2points, inside_grid +from .netcdf import read_cryoclim_nc, read_sentinel_nc +from .obs import 
ObservationSet +from .observation import Observation + + +def snow_pseudo_obs_cryoclim( + validtime, + grid_snow_class, + grid_lons, + grid_lats, + step, + fg_geo, + grid_snow_fg, + gelevs_fg, + fg_threshold=0.4, + new_snow_depth=0.1, + glaf=None, + laf_threshold=0.1, +): + """Cryoclim snow. + + Args: + validtime (_type_): _description_ + grid_snow_class (_type_): _description_ + grid_lons (_type_): _description_ + grid_lats (_type_): _description_ + step (_type_): _description_ + fg_geo (_type_): _description_ + grid_snow_fg (_type_): _description_ + fg_threshold (float, optional): _description_. Defaults to 2.0. + new_snow_depth (float, optional): _description_. Defaults to 0.01. + glaf (np.ndarray, optional): LandAreaFraction. Defaults to None + laf_threshold(float): Threshold to remove points. Defaults to 0.1., + Returns: + list: List of observation objects + """ + n_x = grid_lons.shape[0] + n_y = grid_lons.shape[1] + + n_x = int(n_x / step) + n_y = int(n_y / step) + + # TODO rewrite to use lonlatvals geo + counter = 0 + iii = 0 + res_lons = [] + res_lats = [] + p_snow_class = {} + for __ in range(0, n_x): + jjj = 0 + for __ in range(0, n_y): + if ( + ( + (grid_snow_class[0, iii, jjj] == 2) + or (grid_snow_class[0, iii, jjj] == 1) + ) + and ((grid_lats[iii, jjj] < 90) and (grid_lats[iii, jjj] > 0.0)) + and ((grid_lons[iii, jjj] < 180) and (grid_lons[iii, jjj] > -180.0)) + ): + res_lons.append(np.float64(grid_lons[iii, jjj])) + res_lats.append(np.float64(grid_lats[iii, jjj])) + p_snow_class.update({str(counter): grid_snow_class[0, iii, jjj]}) + counter = counter + 1 + jjj = jjj + step + iii = iii + step + + p_fg_snow_depth = gridpos2points( + fg_geo.lons, fg_geo.lats, np.asarray(res_lons), np.asarray(res_lats), grid_snow_fg + ) + p_fg_elevs = gridpos2points( + fg_geo.lons, fg_geo.lats, np.asarray(res_lons), np.asarray(res_lats), gelevs_fg + ) + if glaf is not None: + p_fg_laf = gridpos2points( + fg_geo.lons, fg_geo.lats, np.asarray(res_lons), 
np.asarray(res_lats), glaf + ) + print(fg_geo.lons) + print(fg_geo.lats) + in_grid = inside_grid( + np.asarray(fg_geo.lons), + np.asarray(fg_geo.lats), + np.asarray(res_lons), + np.asarray(res_lats), + distance=2500.0, + ) + + # Ordering of points must be the same..... + obs = [] + flags = [] + cis = [] + lafs = [] + providers = [] + for i in range(0, p_fg_snow_depth.shape[0]): + + p_snow_fg = p_fg_snow_depth[i] + logging.debug("%s %s %s %s", i, p_snow_fg, res_lons[i], res_lats[i]) + if not np.isnan(p_snow_fg): + laf_ok = True + if glaf is not None: + if p_fg_laf[i] < laf_threshold: + laf_ok = False + logging.debug( + "Remove position because p_fg_laf=%s < %s", + p_fg_laf[i], + laf_threshold, + ) + # Check if in grid + if in_grid[i] and laf_ok: + obs_value = np.nan + try: + snow_class_val = p_snow_class[str(i)] + except KeyError: + continue + if snow_class_val == 2: + if p_snow_fg > 0: + if fg_threshold is not None: + if p_snow_fg <= fg_threshold: + obs_value = p_snow_fg + else: + obs_value = p_snow_fg + else: + obs_value = new_snow_depth + elif snow_class_val == 1: + if p_snow_fg >= 0.0: + obs_value = 0.0 + + if not np.isnan(obs_value): + flags.append(0) + cis.append(0) + lafs.append(0) + providers.append(0) + obs.append( + Observation( + validtime, + res_lons[i], + res_lats[i], + obs_value, + p_fg_elevs[i], + varname="totalSnowDepth", + ) + ) + + logging.info("Possible pseudo-observations: %s", n_x * n_y) + logging.info("Pseudo-observations created: %s", len(obs)) + return obs + + +def sm_obs_sentinel( + validtime, + grid_sm_class, + grid_lons, + grid_lats, + step, + fg_geo, + grid_sm_fg, + fg_threshold=1.0, +): + """Sentinel. + + Args: + validtime (_type_): _description_ + grid_sm_class (_type_): _description_ + grid_lons (_type_): _description_ + grid_lats (_type_): _description_ + step (_type_): _description_ + fg_geo (_type_): _description_ + grid_sm_fg (_type_): _description_ + fg_threshold (_type_, optional): _description_. Defaults to 1.. 
+ + Returns: + _type_: _description_ + + """ + n_x = grid_lons.shape[0] + n_y = grid_lons.shape[1] + + n_x = int(n_x / step) + n_y = int(n_y / step) + + # TODO rewrite to use lonlatvals geo + counter = 0 + iii = 0 + res_lons = [] + res_lats = [] + p_sm_class = {} + for __ in range(0, n_x): + jjj = 0 + for __ in range(0, n_y): + res_lons.append(np.float64(grid_lons[iii, jjj])) + res_lats.append(np.float64(grid_lats[iii, jjj])) + p_sm_class.update({str(counter): grid_sm_class[iii, jjj]}) + counter = counter + 1 + jjj = jjj + step + iii = iii + step + + p_fg_sm = gridpos2points( + fg_geo.lons, fg_geo.lats, np.asarray(res_lons), np.asarray(res_lats), grid_sm_fg + ) + in_grid = inside_grid( + fg_geo.lons, + fg_geo.lats, + np.asarray(res_lons), + np.asarray(res_lats), + distance=2500.0, + ) + # Ordering of points must be the same..... + obs = [] + flags = [] + cis = [] + lafs = [] + providers = [] + for i in range(0, p_fg_sm.shape[0]): + + p_sm_fg = p_fg_sm[i] + if not np.isnan(p_sm_fg): + # Check if in grid + if in_grid[i]: + obs_value = np.nan + if (p_sm_class[str(i)] > 1) or (p_sm_class[str(i)] < 0): + if p_sm_fg <= fg_threshold: + obs_value = p_sm_fg + else: + obs_value = 999 + + else: + obs_value = p_sm_class[str(i)] + + if not np.isnan(obs_value): + flags.append(0) + cis.append(0) + lafs.append(0) + providers.append(0) + obs.append( + Observation( + validtime, + res_lons[i], + res_lats[i], + obs_value, + varname="surface_soil_moisture", + ) + ) + + logging.info("Possible pseudo-observations: %s", n_x * n_y) + logging.info("Pseudo-observations created: %s", len(obs)) + return obs + + +class SentinelObservationSet(ObservationSet): + """JSON observation set.""" + + def __init__( + self, + filename, + validtime, + fg_geo, + grid_sm_fg, + label="sentinel", + step=2, + fg_threshold=1.0, + ): + """Construct an observation data set from a json file. 
+ + Args: + filename (list): Filename + validtime (as_datetime): Valid time + fg_geo (Geo): Surfex geometry + grid_sm_fg (np.ndarray): Snow first guess field + label (str, optional): Label of set. Defaults to "sentinel". + step (int, optional): Step to process grid points. Defaults to 2. + fg_threshold (float, optional): First guess threshold. Defaults to 1.0 + + """ + grid_lons, grid_lats, grid_sm_class = read_sentinel_nc(filename) + observations = sm_obs_sentinel( + validtime, + grid_sm_class, + grid_lons, + grid_lats, + step, + fg_geo, + grid_sm_fg, + fg_threshold, + ) + + ObservationSet.__init__(self, observations, label=label) + + +class CryoclimObservationSet(ObservationSet): + """JSON observation set.""" + + def __init__( + self, + filenames, + validtime, + fg_geo, + snow_fg, + gelevs_fg, + label="cryo", + step=2, + fg_threshold=0.4, + new_snow_depth=0.1, + glaf=None, + laf_threshold=0.1, + ): + """Construct an observation data set from a json file. + + Args: + filenames (list): Filename + validtime (as_datetime): Valid time + fg_geo (Geo): Surfex geometry + snow_fg (np.ndarray): Snow first guess field + gelevs_fg: Grid elevations + label (str, optional): Label of set. Defaults to "cryo". + step (int, optional): Step to process grid points. Defaults to 2. + fg_threshold (float, optional): First guess threshold. Defaults to 0.4 + new_snow_depth (float, optional): New snow depth in cryoclim in m. Defaults to 0.1 + glaf (np.ndarray, optional): Land-area-fraction. Defaults to None. + laf_threshold (float, optional): Threshold for existing land-area-fraction. Defaults to 0.1. 
+ + """ + grid_lons, grid_lats, grid_snow_class = read_cryoclim_nc(filenames) + observations = snow_pseudo_obs_cryoclim( + validtime, + grid_snow_class, + grid_lons, + grid_lats, + step, + fg_geo, + snow_fg, + gelevs_fg, + fg_threshold, + new_snow_depth, + glaf=glaf, + laf_threshold=laf_threshold, + ) + + ObservationSet.__init__(self, observations, label=label) diff --git a/tests/conftest.py b/tests/conftest.py index 5d4216d..61a619a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -684,6 +684,77 @@ def firstguess4gridpp(tmp_path_factory): return fname +@pytest.fixture() +def data_cryoclim_nc_file(tmp_path_factory): + fname = f"{tmp_path_factory.getbasetemp().as_posix()}/cryoclim_nc.nc" + cdlfname = f"{tmp_path_factory.getbasetemp().as_posix()}/cryoclim_nc.cdl" + with open(cdlfname, mode="w", encoding="utf-8") as fhandler: + fhandler.write( + """ +netcdf cryoclim { +dimensions: + time = 1 ; + xc = 2 ; + yc = 3 ; +variables: + int lambert_conformal_conic ; + lambert_conformal_conic:grid_mapping_name = "lambert_conformal_conic" ; + lambert_conformal_conic:standard_parallel = 63., 63. ; + lambert_conformal_conic:longitude_of_central_meridian = 15. ; + lambert_conformal_conic:latitude_of_projection_origin = 63. ; + lambert_conformal_conic:earth_radius = 6371000. 
; + lambert_conformal_conic:proj4 = "+proj=lcc +lon_0=15 +lat_0=63 +lat_1=63 +lat_2=63 +R=6371000 +no_defs" ; + double time(time) ; + time:axis = "T" ; + time:long_name = "reference time of product" ; + time:standard_name = "time" ; + time:units = "seconds since 1978-01-01 00:00:00" ; + time:calendar = "standard" ; + time:bounds = "time_bnds" ; + double xc(xc) ; + xc:axis = "X" ; + xc:long_name = "x-coordinate in Cartesian system" ; + xc:standard_name = "projection_x_coordinate" ; + xc:units = "m" ; + double yc(yc) ; + yc:axis = "Y" ; + yc:long_name = "y-coordinate in Cartesian system" ; + yc:standard_name = "projection_y_coordinate" ; + yc:units = "m" ; + float lon(yc, xc) ; + lon:long_name = "longitude coordinate" ; + lon:standard_name = "longitude" ; + lon:units = "degrees_east" ; + float lat(yc, xc) ; + lat:long_name = "latitude coordinate" ; + lat:standard_name = "latitude" ; + lat:units = "degrees_north" ; + int classed_product(time, yc, xc) ; + classed_product:_FillValue = -99 ; + classed_product:least_significant_digit = 3 ; + classed_product:units = "1" ; + classed_product:long_name = "-1: ocean, 0: snow free, 1: snow, 3: clouded, 4: no data" ; + classed_product:coordinates = "lat lon" ; + classed_product:grid_mapping = "lambert_conformal_conic" ; + +data: + +time = 1425211200; + +lon = 10, 10.1, 10.0, 10.1, 10.0, 10.1; + +lat = 60.0, 60.1, 60.2, 60.3, 60.4, 60.5; + +classed_product = 0, 1, 0, 3, 0, 4; +} +""" + ) + Dataset(fname, mode="w").fromcdl( + cdlfname, ncfilename=fname, mode="a", format="NETCDF3_CLASSIC" + ) + return fname + + class DummyFrostRequest: def __init__(self): """Construct dummy Frost request.""" diff --git a/tests/smoke/test_cli_misc.py b/tests/smoke/test_cli_misc.py index 630ce62..b08f2e2 100644 --- a/tests/smoke/test_cli_misc.py +++ b/tests/smoke/test_cli_misc.py @@ -49,83 +49,12 @@ def test_cli_set_geo_from_obs_set(obsset_fname, tmp_path_factory): get_geo_object(json.load(fhandler)) -@pytest.fixture() -def 
data_cryoclim_nc_file(tmp_path_factory): - fname = f"{tmp_path_factory.getbasetemp().as_posix()}/cryoclim_nc.nc" - cdlfname = f"{tmp_path_factory.getbasetemp().as_posix()}/cryoclim_nc.cdl" - with open(cdlfname, mode="w", encoding="utf-8") as fhandler: - fhandler.write( - """ -netcdf cryoclim { -dimensions: - time = 1 ; - xc = 2 ; - yc = 3 ; -variables: - int lambert_conformal_conic ; - lambert_conformal_conic:grid_mapping_name = "lambert_conformal_conic" ; - lambert_conformal_conic:standard_parallel = 63., 63. ; - lambert_conformal_conic:longitude_of_central_meridian = 15. ; - lambert_conformal_conic:latitude_of_projection_origin = 63. ; - lambert_conformal_conic:earth_radius = 6371000. ; - lambert_conformal_conic:proj4 = "+proj=lcc +lon_0=15 +lat_0=63 +lat_1=63 +lat_2=63 +R=6371000 +no_defs" ; - double time(time) ; - time:axis = "T" ; - time:long_name = "reference time of product" ; - time:standard_name = "time" ; - time:units = "seconds since 1978-01-01 00:00:00" ; - time:calendar = "standard" ; - time:bounds = "time_bnds" ; - double xc(xc) ; - xc:axis = "X" ; - xc:long_name = "x-coordinate in Cartesian system" ; - xc:standard_name = "projection_x_coordinate" ; - xc:units = "m" ; - double yc(yc) ; - yc:axis = "Y" ; - yc:long_name = "y-coordinate in Cartesian system" ; - yc:standard_name = "projection_y_coordinate" ; - yc:units = "m" ; - float lon(yc, xc) ; - lon:long_name = "longitude coordinate" ; - lon:standard_name = "longitude" ; - lon:units = "degrees_east" ; - float lat(yc, xc) ; - lat:long_name = "latitude coordinate" ; - lat:standard_name = "latitude" ; - lat:units = "degrees_north" ; - int classed_product(time, yc, xc) ; - classed_product:_FillValue = -99 ; - classed_product:least_significant_digit = 3 ; - classed_product:units = "1" ; - classed_product:long_name = "-1: ocean, 0: snow free, 1: snow, 3: clouded, 4: no data" ; - classed_product:coordinates = "lat lon" ; - classed_product:grid_mapping = "lambert_conformal_conic" ; - -data: - -time = _; - 
-lon = 10, 11; - -lat = 59, 60, 61; - -classed_product = 0, 1, 0, 3, 0, 4; -} -""" - ) - Dataset(fname, mode="w").fromcdl( - cdlfname, ncfilename=fname, mode="a", format="NETCDF3_CLASSIC" - ) - return fname - - def test_cryoclim_pseudoobs(tmp_path_factory, data_cryoclim_nc_file, firstguess4gridpp): out_fname = f"{tmp_path_factory.getbasetemp().as_posix()}/output_cryoclim.json" argv = [ "-step", - "4", + "1", "-fg", firstguess4gridpp, "-i", diff --git a/tests/unit/test_cryoclim.py b/tests/unit/test_cryoclim.py new file mode 100644 index 0000000..57a0593 --- /dev/null +++ b/tests/unit/test_cryoclim.py @@ -0,0 +1,41 @@ +"""Test cryoclim obs reading and obs set.""" + +import numpy as np + +from pysurfex.datetime_utils import as_datetime +from pysurfex.netcdf import read_cryoclim_nc +from pysurfex.pseudoobs import CryoclimObservationSet + + +def test_read_cryo_nc(data_cryoclim_nc_file): + infiles = [data_cryoclim_nc_file] + grid_lons, __, __ = read_cryoclim_nc(infiles) + + lons = np.ma.array([[10.0, 10.1], [10.0, 10.1], [10.0, 10.1]]) + np.testing.assert_almost_equal(lons, grid_lons, 3) + + +def test_get_cryo_obs_set(tmp_path_factory, data_cryoclim_nc_file, conf_proj_2x3): + + validtime = as_datetime("202303010600") + + fg_geo = conf_proj_2x3 + grid_snow_fg = np.ma.array([[0.10, 0.20, 0.30], [0.40, 0.50, 0.60]]) + gelevs = np.ma.array([[100, 200, 300], [400, 500, 600]]) + glaf_fg = np.ma.array([[0, 0.1, 0.3], [0.4, 0.5, 0.6]]) + + step = 1 + obs_set = CryoclimObservationSet( + [data_cryoclim_nc_file], + validtime, + fg_geo, + grid_snow_fg, + gelevs, + glaf=glaf_fg, + step=step, + laf_threshold=1.1, + ) + indent = 2 + output = f"{tmp_path_factory.getbasetemp().as_posix()}/cryo.json" + obs_set.write_json_file(output, indent=indent) + print(f"Written to {output}")