Skip to content

Commit

Permalink
Add cycolim/sentinel as an observation set
Browse files Browse the repository at this point in the history
  • Loading branch information
trygveasp committed Sep 18, 2023
1 parent 59d8339 commit 96ba7a8
Show file tree
Hide file tree
Showing 7 changed files with 483 additions and 293 deletions.
27 changes: 16 additions & 11 deletions pysurfex/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,9 +69,10 @@
read_sentinel_nc,
write_analysis_netcdf_file,
)
from .obs import Observation, sm_obs_sentinel, snow_pseudo_obs_cryoclim
from .obs import Observation
from .obsmon import write_obsmon_sqlite_file
from .platform_deps import SystemFilePathsFromFile
from .pseudoobs import CryoclimObservationSet, SentinelObservationSet
from .read import ConvertedInput, Converter
from .run import BatchJob, Masterodb, PerturbedOffline, SURFEXBinary
from .titan import (
Expand Down Expand Up @@ -1257,12 +1258,9 @@ def sentinel_obs(argv=None):
varname = kwargs["varname"]
indent = kwargs["indent"]

grid_lons, grid_lats, grid_sm_class = read_sentinel_nc(infiles)
fg_geo, validtime, grid_sm_fg, __, __ = read_first_guess_netcdf_file(fg_file, varname)
q_c = sm_obs_sentinel(
validtime, grid_sm_class, grid_lons, grid_lats, step, fg_geo, grid_sm_fg
)
q_c.write_output(output, indent=indent)
obs_set = SentinelObservationSet(infiles, validtime, fg_geo, grid_sm_fg, step=step)
obs_set.write_json_file(output, indent=indent)


def qc2obsmon(argv=None):
Expand Down Expand Up @@ -1612,7 +1610,7 @@ def cryoclim_pseudoobs(argv=None):
)
else:
logging.basicConfig(
format="%(asctime)s %(levelname)s %(message)s", level=logging.INFO
format="%(asctime)s %(levelname,)s %(message)s", level=logging.INFO
)
logging.info("************ cryoclim_pseudoobs ******************")

Expand All @@ -1622,15 +1620,22 @@ def cryoclim_pseudoobs(argv=None):
output = kwargs["output"]
varname = kwargs["varname"]
indent = kwargs["indent"]
laf_threshold = kwargs["laf_threshold"]

grid_lons, grid_lats, grid_snow_class = read_cryoclim_nc(infiles)
fg_geo, validtime, grid_snow_fg, glafs, gelevs = read_first_guess_netcdf_file(
fg_file, varname
)
q_c = snow_pseudo_obs_cryoclim(
validtime, grid_snow_class, grid_lons, grid_lats, step, fg_geo, grid_snow_fg, gelevs
obs_set = CryoclimObservationSet(
infiles,
validtime,
fg_geo,
grid_snow_fg,
gelevs,
step=step,
glaf=glafs,
laf_threshold=laf_threshold,
)
q_c.write_output(output, indent=indent)
obs_set.write_json_file(output, indent=indent)


def create_namelist(argv=None):
Expand Down
9 changes: 9 additions & 0 deletions pysurfex/cmd_parsing.py
Original file line number Diff line number Diff line change
Expand Up @@ -2024,6 +2024,15 @@ def parse_cryoclim_pseudoobs(argv):
default=None,
required=False,
)
parser.add_argument(
"-gt",
"--laf_threshold",
dest="laf_threshold",
type=float,
help="LandAreaFraction threshold",
default=0.1,
required=False,
)

if len(argv) == 0:
parser.print_help()
Expand Down
210 changes: 0 additions & 210 deletions pysurfex/obs.py
Original file line number Diff line number Diff line change
Expand Up @@ -811,213 +811,3 @@ def __init__(self, an_time, filename, label=""):
)

ObservationSet.__init__(self, observations, label=label)


def snow_pseudo_obs_cryoclim(
    validtime,
    grid_snow_class,
    grid_lons,
    grid_lats,
    step,
    fg_geo,
    grid_snow_fg,
    gelevs_fg,
    fg_threshold=0.4,
    new_snow_depth=0.1,
):
    """Create snow-depth pseudo-observations from a cryoclim snow-class grid.

    The input grid is sub-sampled with ``step``. Points classified as snow
    (class 2) or bare ground (class 1) with physically meaningful coordinates
    are kept, the first guess is interpolated to them, and an observation
    value is derived by comparing the satellite class with the first-guess
    snow depth.

    Args:
        validtime (datetime.datetime): Valid time of the observations.
        grid_snow_class (np.ndarray): Snow-class grid; first axis is the band.
        grid_lons (np.ndarray): Longitudes of the snow-class grid.
        grid_lats (np.ndarray): Latitudes of the snow-class grid.
        step (int): Sub-sampling step in grid points.
        fg_geo (Geo): Geometry of the first-guess field.
        grid_snow_fg (np.ndarray): First-guess snow-depth field.
        gelevs_fg (np.ndarray): First-guess elevation field.
        fg_threshold (float, optional): Reject snow-covered points where the
            first-guess depth exceeds this value. Defaults to 0.4.
        new_snow_depth (float, optional): Depth assigned where the satellite
            sees snow but the first guess is snow free. Defaults to 0.1.

    Returns:
        QCDataSet: Quality-controlled pseudo-observation data set.

    """
    n_x = grid_lons.shape[0]
    n_y = grid_lons.shape[1]

    n_x = int(n_x / step)
    n_y = int(n_y / step)

    # TODO rewrite to use lonlatvals geo
    counter = 0
    iii = 0
    res_lons = []
    res_lats = []
    p_snow_class = {}
    for __ in range(0, n_x):
        jjj = 0
        for __ in range(0, n_y):
            # Keep only snow (2) and bare-ground (1) classes with sane
            # latitude/longitude values.
            if (
                (grid_snow_class[0, iii, jjj] == 2)
                or (grid_snow_class[0, iii, jjj] == 1)
            ) and (
                (0.0 < grid_lats[iii, jjj] < 90)
                and (-180.0 < grid_lons[iii, jjj] < 180)
            ):
                res_lons.append(grid_lons[iii, jjj])
                res_lats.append(grid_lats[iii, jjj])
                p_snow_class.update({str(counter): grid_snow_class[0, iii, jjj]})
                # Was a stray print() to stdout; use debug logging instead.
                logging.debug("%s", grid_snow_class[0, iii, jjj])
                counter = counter + 1
            jjj = jjj + step
        iii = iii + step

    p_fg_snow_depth = gridpos2points(
        fg_geo.lons, fg_geo.lats, np.asarray(res_lons), np.asarray(res_lats), grid_snow_fg
    )
    p_fg_elevs = gridpos2points(
        fg_geo.lons, fg_geo.lats, np.asarray(res_lons), np.asarray(res_lats), gelevs_fg
    )
    in_grid = inside_grid(
        fg_geo.lons,
        fg_geo.lats,
        np.asarray(res_lons),
        np.asarray(res_lats),
        distance=2500.0,
    )

    # Ordering of points must be the same.....
    obs = []
    flags = []
    cis = []
    lafs = []
    providers = []
    for i in range(0, p_fg_snow_depth.shape[0]):

        p_snow_fg = p_fg_snow_depth[i]
        logging.debug("%s %s %s %s", i, p_snow_fg, res_lons[i], res_lats[i])
        if not np.isnan(p_snow_fg):
            # Check if in grid
            if in_grid[i]:
                obs_value = np.nan
                try:
                    snow_class_val = p_snow_class[str(i)]
                except KeyError:
                    continue
                if snow_class_val == 2:
                    if p_snow_fg > 0:
                        # First guess already has snow: accept it as the
                        # observation unless it exceeds the rejection threshold.
                        if fg_threshold is not None:
                            if p_snow_fg <= fg_threshold:
                                obs_value = p_snow_fg
                        else:
                            obs_value = p_snow_fg
                    else:
                        # Satellite sees snow but first guess is snow free.
                        obs_value = new_snow_depth
                elif snow_class_val == 1:
                    # Satellite sees bare ground: force zero snow depth.
                    if p_snow_fg >= 0.0:
                        obs_value = 0.0

                if not np.isnan(obs_value):
                    flags.append(0)
                    cis.append(0)
                    lafs.append(0)
                    providers.append(0)
                    obs.append(
                        Observation(
                            validtime,
                            res_lons[i],
                            res_lats[i],
                            obs_value,
                            p_fg_elevs[i],
                            varname="totalSnowDepth",
                        )
                    )

    logging.info("Possible pseudo-observations: %s", n_x * n_y)
    logging.info("Pseudo-observations created: %s", len(obs))
    return QCDataSet(validtime, obs, flags, cis, lafs, providers)


def sm_obs_sentinel(
    validtime,
    grid_sm_class,
    grid_lons,
    grid_lats,
    step,
    fg_geo,
    grid_sm_fg,
    fg_threshold=1.0,
):
    """Create soil-moisture pseudo-observations from a sentinel grid.

    The sentinel grid is sub-sampled with ``step``, the first guess is
    interpolated to the selected points, and an observation value is chosen
    from either the sentinel class or the first guess.

    Args:
        validtime (datetime.datetime): Valid time of the observations.
        grid_sm_class (np.ndarray): Sentinel soil-moisture class grid.
        grid_lons (np.ndarray): Longitudes of the sentinel grid.
        grid_lats (np.ndarray): Latitudes of the sentinel grid.
        step (int): Sub-sampling step in grid points.
        fg_geo (Geo): Geometry of the first-guess field.
        grid_sm_fg (np.ndarray): First-guess soil-moisture field.
        fg_threshold (float, optional): First-guess values above this are
            replaced by 999. Defaults to 1.0.

    Returns:
        QCDataSet: Quality-controlled pseudo-observation data set.

    """
    n_x = int(grid_lons.shape[0] / step)
    n_y = int(grid_lons.shape[1] / step)

    # TODO rewrite to use lonlatvals geo
    sel_lons = []
    sel_lats = []
    sel_class = {}
    index = 0
    # Sub-sample every ``step``-th grid point, n_x by n_y points in total.
    for iii in range(0, n_x * step, step):
        for jjj in range(0, n_y * step, step):
            sel_lons.append(grid_lons[iii, jjj])
            sel_lats.append(grid_lats[iii, jjj])
            sel_class[str(index)] = grid_sm_class[iii, jjj]
            index = index + 1

    lons_arr = np.asarray(sel_lons)
    lats_arr = np.asarray(sel_lats)
    p_fg_sm = gridpos2points(fg_geo.lons, fg_geo.lats, lons_arr, lats_arr, grid_sm_fg)
    in_grid = inside_grid(
        fg_geo.lons,
        fg_geo.lats,
        lons_arr,
        lats_arr,
        distance=2500.0,
    )
    # Ordering of points must be the same.....
    obs = []
    flags = []
    cis = []
    lafs = []
    providers = []
    for i in range(0, p_fg_sm.shape[0]):
        p_sm_fg = p_fg_sm[i]
        # Skip points with no first guess or outside the first-guess grid.
        if np.isnan(p_sm_fg) or not in_grid[i]:
            continue

        class_val = sel_class[str(i)]
        if (class_val > 1) or (class_val < 0):
            # Class is out of range: fall back to the first guess, capped
            # with the 999 sentinel value above the threshold.
            obs_value = p_sm_fg if p_sm_fg <= fg_threshold else 999
        else:
            obs_value = class_val

        if np.isnan(obs_value):
            continue

        flags.append(0)
        cis.append(0)
        lafs.append(0)
        providers.append(0)
        obs.append(
            Observation(
                validtime,
                sel_lons[i],
                sel_lats[i],
                obs_value,
                varname="surface_soil_moisture",
            )
        )

    logging.info("Possible pseudo-observations: %s", n_x * n_y)
    logging.info("Pseudo-observations created: %s", len(obs))
    return QCDataSet(validtime, obs, flags, cis, lafs, providers)
Loading

0 comments on commit 96ba7a8

Please sign in to comment.