Feature/pysurfex experiment #44

Merged
6 commits merged on Sep 25, 2024
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -46,7 +46,7 @@
python = "^3.8"
dateutils = "^0.6.12"
f90nml = "^1.4.3"
humanize = "^3.14.0"
humanize = ">=3.14.0"
numexpr = "^2.8.4"
numpy = "^1.20.1"
pandas = "^1.3.0"
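The dependency change relaxes the humanize pin from a caret constraint to a plain lower bound: in Poetry, `^3.14.0` means `>=3.14.0,<4.0.0`, so the new spec only drops the implicit upper bound. A minimal sketch of the semantics using the `packaging` library (illustration only, not part of this PR):

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

# Poetry's caret "^3.14.0" expands to ">=3.14.0,<4.0.0"; the PR switches the
# spec to a bare ">=3.14.0", which removes the upper bound.
caret = SpecifierSet(">=3.14.0,<4.0.0")
relaxed = SpecifierSet(">=3.14.0")

print(Version("3.14.0") in caret, Version("3.14.0") in relaxed)  # True True
print(Version("4.1.0") in caret, Version("4.1.0") in relaxed)    # False True
```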
7 changes: 3 additions & 4 deletions pysurfex/binary_input.py
@@ -49,7 +49,6 @@ def __init__(self, data):
def archive_files(self):
"""Archive files."""
for output_file, target in self.data.items():

logging.info("%s -> %s", output_file, target)
command = "mv"
if isinstance(target, dict):
@@ -95,7 +94,6 @@ def __init__(self, data):
def prepare_input(self):
"""Prepare input."""
for target, input_file in self.data.items():

logging.info("%s -> %s", target, input_file)
logging.debug(os.path.realpath(target))
command = None
@@ -358,8 +356,9 @@ def substitute(self, key, val, macros=None, micro="@", check_parsing=False):
pkey = key
pval = val
for spath_key, spath_val in self.platform.system_file_paths.items():
pkey = pkey.replace(f"{micro}{spath_key}{micro}", spath_val)
pval = pval.replace(f"{micro}{spath_key}{micro}", spath_val)
if isinstance(spath_val, str):
pkey = pkey.replace(f"{micro}{spath_key}{micro}", spath_val)
pval = pval.replace(f"{micro}{spath_key}{micro}", spath_val)
if macros is not None:
for macro_key, macro_val in macros.items():
pkey = pkey.replace(f"{micro}{macro_key}{micro}", macro_val)
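The new `isinstance` guard in `substitute` matters because `str.replace` only accepts string arguments: if an entry in `system_file_paths` holds a non-string value, the unguarded call would raise a `TypeError` for that entry. A minimal sketch with invented path entries:

```python
# Minimal sketch with invented values: only string entries are substituted.
system_file_paths = {"climdir": "/data/climate", "ecosg_paths": ["/data/a", "/data/b"]}
micro = "@"

key = "@climdir@/PGD.nc"
for spath_key, spath_val in system_file_paths.items():
    if isinstance(spath_val, str):
        key = key.replace(f"{micro}{spath_key}{micro}", spath_val)
    # Without the guard, key.replace(..., ["/data/a", "/data/b"]) raises
    # TypeError: replace() argument 2 must be str, not list

print(key)  # /data/climate/PGD.nc
```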
37 changes: 33 additions & 4 deletions pysurfex/cli.py
@@ -139,7 +139,6 @@ def run_first_guess_for_oi(**kwargs):
cache = Cache(3600)
f_g = None
for var in variables:

inputfile = kwargs.get("inputfile")
fileformat = kwargs.get("inputformat")
logging.debug("inputfile: %s", inputfile)
@@ -289,6 +288,16 @@ def run_masterodb(**kwargs):
archive = kwargs["archive"]
only_archive = kwargs["only_archive"]
print_namelist = kwargs["print_namelist"]
try:
consistency = kwargs["no_consistency"]
if consistency:
consistency = False
except KeyError:
consistency = True
try:
assemble = kwargs["assemble_file"]
except KeyError:
assemble = None

check_existence = True
if "tolerate_missing" in kwargs:
@@ -346,7 +355,12 @@
if os.path.isfile(namelist_path):
with open(namelist_path, mode="r", encoding="utf-8") as file_handler:
nam_defs = yaml.safe_load(file_handler)
nam_gen = NamelistGenerator(mode, config, nam_defs)
if assemble is not None:
with open(assemble, mode="r", encoding="utf8") as fh:
assemble = json.load(fh)
nam_gen = NamelistGenerator(
mode, config, nam_defs, assemble=assemble, consistency=consistency
)
my_settings = nam_gen.nml
if input_binary_data is None:
raise RuntimeError("input_binary_data not set")
@@ -402,7 +416,6 @@
if output is not None:
exists = os.path.exists(output)
if not exists or force:

if binary is None:
my_batch = None

@@ -532,6 +545,17 @@ def run_surfex_binary(mode, **kwargs):
if "forc_zs" in kwargs:
forc_zs = kwargs["forc_zs"]

try:
consistency = kwargs["no_consistency"]
if consistency:
consistency = False
except KeyError:
consistency = True
try:
assemble = kwargs["assemble_file"]
except KeyError:
assemble = None

if mode == "pgd":
pgd = True
need_pgd = False
@@ -593,7 +617,12 @@
if os.path.isfile(namelist_path):
with open(namelist_path, mode="r", encoding="utf-8") as file_handler:
nam_defs = yaml.safe_load(file_handler)
nam_gen = NamelistGenerator(mode, config, nam_defs)
if assemble is not None:
with open(assemble, mode="r", encoding="utf8") as fh:
assemble = json.load(fh)
nam_gen = NamelistGenerator(
mode, config, nam_defs, assemble=assemble, consistency=consistency
)

my_settings = nam_gen.nml
if mode == "pgd":
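Both `run_masterodb` and `run_surfex_binary` now read two optional keyword arguments and forward them to `NamelistGenerator`: `consistency` (turned off by `--no-consistency`) and `assemble` (loaded from the JSON file given with `--assemble`). A condensed, stand-alone sketch of that option handling (not the actual helper in the module; the try/except KeyError blocks in the diff behave the same way when the keys are missing):

```python
import json


def namelist_options(**kwargs):
    """Sketch of the option handling added to both run_* functions above."""
    # --no-consistency is a store_true flag: when present, skip the consistency check
    consistency = not kwargs.get("no_consistency", False)
    # --assemble points at a JSON file with the list of namelist blocks to use
    assemble = kwargs.get("assemble_file")
    if assemble is not None:
        with open(assemble, mode="r", encoding="utf8") as fh:
            assemble = json.load(fh)
    return consistency, assemble


# Defaults are kept when the new flags are absent
print(namelist_options())                     # (True, None)
print(namelist_options(no_consistency=True))  # (False, None)
```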
21 changes: 20 additions & 1 deletion pysurfex/cmd_parsing.py
@@ -851,6 +851,16 @@ def parse_args_masterodb(argv):
nargs="?",
help="Full path of MASTERODB binary",
)
parser.add_argument(
"--no-consistency", dest="no_consistency", action="store_true", required=False
)
parser.add_argument(
"--assemble",
type=str,
required=False,
help="Path to file containing list of namelist blocks",
default=None,
)

if len(argv) == 0:
parser.print_help()
@@ -985,6 +995,16 @@ def parse_args_surfex_binary(argv, mode):
help="JSON file with archive output",
)
parser.add_argument("binary", type=str, help="Command to run")
parser.add_argument(
"--no-consistency", dest="no_consistency", action="store_true", required=False
)
parser.add_argument(
"--assemble",
type=str,
required=False,
help="Path to file containing list of namelist blocks",
default=None,
)

if len(argv) == 0:
parser.print_help()
@@ -2212,7 +2232,6 @@ def variable_parser(needles, argv, parser):

kwargs = {}
for needle in needles:

argv = argv_string[start_indices[needle] : end_indices[needle]].split()
opt = parser.parse_args(argv)
vargs = {}
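The same two options are registered on both the MASTERODB and the SURFEX binary parsers. A stand-alone argparse sketch showing how they parse (mirroring the definitions above, outside the full parsers):

```python
import argparse

# Stand-alone reproduction of the two options added to both parsers above.
parser = argparse.ArgumentParser()
parser.add_argument(
    "--no-consistency", dest="no_consistency", action="store_true", required=False
)
parser.add_argument(
    "--assemble",
    type=str,
    required=False,
    help="Path to file containing list of namelist blocks",
    default=None,
)

args = parser.parse_args(["--no-consistency", "--assemble", "blocks.json"])
print(args.no_consistency)  # True
print(args.assemble)        # blocks.json
```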
1 change: 0 additions & 1 deletion pysurfex/file.py
@@ -1323,7 +1323,6 @@ def read(self, variable, times):
tstep = 0
col = 0
for line in self.file.read().splitlines():

words = line.split()
if len(words) > 0:
for i, word in enumerate(words):
5 changes: 0 additions & 5 deletions pysurfex/forcing.py
@@ -567,7 +567,6 @@ def run_time_loop(options, var_objs, att_objs):
# Loop output time steps
this_time = options["start"]
while this_time <= options["stop"]:

# Write for each time step
logging.info(
"Creating forcing for: %s time_step: %s",
@@ -639,7 +638,6 @@ def set_input_object(
# All objects with converters, find converter dict entry
conf_dict = {}
if forcingformat != "constant":

# Non-height dependent variables
if ref_height is None:
if "converter" in conf[sfx_var][forcingformat]:
@@ -694,7 +692,6 @@ def set_input_object(

obj = ConstantValue(geo, sfx_var, const_dict)
else:

# Construct the converter
converter = Converter(
selected_converter, first_base_time, defs, conf_dict, forcingformat
@@ -892,7 +889,6 @@ def set_forcing_config(**kwargs):
atts = ["ZS", "ZREF", "UREF"]
att_objs = []
for att_var in atts:

# Override with command line options for a given variable
ref_height = None
cformat = fileformat
@@ -947,7 +943,6 @@
var_objs = []
# Search in config file for parameters to override
for sfx_var in variables:

ref_height = None
cformat = fileformat
if sfx_var == "TA":
3 changes: 0 additions & 3 deletions pysurfex/geo.py
@@ -653,7 +653,6 @@ def __init__(self, from_json, recreate=False):
and "ncols"
and "nrows" in domain_dict["nam_ign"]
):

self.clambert = domain_dict["nam_ign"]["clambert"]
npoints = domain_dict["nam_ign"]["npoints"]
self.x_x = domain_dict["nam_ign"]["xx"]
@@ -811,7 +810,6 @@ def ign_mask(pxall, pyall, xxx, yyy, recreate):

for i, pxall_val in enumerate(pxall):
for pyall_val in pyall:

count = count + 1
for k, xval in enumerate(xxx):
if xval == pxall_val and yyy[k] == pyall_val:
@@ -933,7 +931,6 @@ def set_domain(settings, domain, hm_mode=False):
if isinstance(settings, dict):
if domain in settings:
if hm_mode:

ezone = 11
if "EZONE" in settings[domain]:
ezone = settings[domain]["EZONE"]
1 change: 0 additions & 1 deletion pysurfex/input_methods.py
@@ -33,7 +33,6 @@ def get_datasources(obs_time, settings):
"""
datasources = []
for obs_set in settings:

kwargs = {}
kwargs.update({"label": obs_set})

5 changes: 1 addition & 4 deletions pysurfex/namelist.py
@@ -59,7 +59,7 @@ def __init__(self, program, config, definitions, assemble=None, consistency=True
self.assemble = self.namelist_blocks()
else:
self.assemble = assemble
logging.debug(self.assemble)
logging.info("Namelist blocks: %s", self.assemble)
nlres = self.assemble_namelist()
self.nml = f90nml.Namelist(nlres)
if consistency:
@@ -118,7 +118,6 @@ def namelist_blocks(self):

# Program specific settings
if self.program == "pgd":

input_blocks += ["pgd", "pgd_cover", "pgd_zs"]
eco_sg = self.config.get_setting("SURFEX#COVER#SG")
if eco_sg:
@@ -376,7 +375,6 @@ def concistency(self, nml):

# Program specific settings
if self.program == "pgd":

problems = self.check_nml_setting(
problems,
nml,
@@ -406,7 +404,6 @@ def concistency(self, nml):
self.config.get_setting("SURFEX#TILES#TOWN"),
)
if self.config.get_setting("SURFEX#TOWN#LTOWN_TO_ROCK"):

if self.config.get_setting("SURFEX#TILES#TOWN") != "NONE":
logging.warning(
"WARNING: TOWN is not NONE and you want LTOWN_TO_ROCK. "
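In `NamelistGenerator.__init__` the selected blocks are now logged at info level; when no explicit `assemble` list is passed, `namelist_blocks()` derives one from the configuration before `assemble_namelist()` merges the block definitions into an `f90nml.Namelist`. The sketch below only illustrates that merge idea: the block names and settings are invented, and the real definitions come from the YAML file loaded in cli.py:

```python
import sys

import f90nml

# Invented block definitions; the real ones come from the namelist definition YAML.
nam_defs = {
    "io": {"nam_io_offline": {"csurf_filetype": "NC"}},
    "pgd": {"nam_pgd_grid": {"cgrid": "CONF PROJ"}},
}
assemble = ["io", "pgd"]  # the kind of list an --assemble JSON file could contain

# Merge the selected blocks into one namelist dict, later blocks updating earlier ones.
nlres = {}
for block in assemble:
    for nml_name, settings in nam_defs[block].items():
        nlres.setdefault(nml_name, {}).update(settings)

nml = f90nml.Namelist(nlres)
nml.write(sys.stdout)  # prints the two assembled namelist groups
```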
3 changes: 0 additions & 3 deletions pysurfex/namelist_legacy.py
@@ -1443,7 +1443,6 @@ def set_pgd_namelist(self, merged_dict):
self.config.get_setting("SURFEX#TILES#TOWN"),
)
if self.config.get_setting("SURFEX#TOWN#LTOWN_TO_ROCK"):

if self.config.get_setting("SURFEX#TILES#TOWN") != "NONE":
logging.warning(
"WARNING: TOWN is not NONE and you want LTOWN_TO_ROCK. "
@@ -1908,7 +1907,6 @@ def prepare_offline_perturbation(self, merged_dict):
merged_dict["NAM_VAR"].update({"NIVAR": 0})
merged_dict["NAM_IO_VARASSIM"].update({"LPRT": False})
if self.config.get_setting("SURFEX#ASSIM#SCHEMES#ISBA") == "EKF":

merged_dict = self.merge_json_namelist_file(
merged_dict, self.input_path + "/offline_assim_pert.json"
)
@@ -1941,7 +1939,6 @@
merged_dict = self.sub(merged_dict, "NAM_VAR", "NVAR", nvar)

if self.config.get_setting("SURFEX#ASSIM#SCHEMES#ISBA") == "ENKF":

merged_dict = self.merge_json_namelist_file(
merged_dict, self.input_path + "/offline_assim_pert.json"
)
1 change: 0 additions & 1 deletion pysurfex/netcdf.py
@@ -848,7 +848,6 @@ def oi2soda(dtg, t2m=None, rh2m=None, s_d=None, s_m=None, output=None):
"""

def check_input_to_soda_dimensions(my_nx, my_ny, nx1, ny1):

if my_nx < 0:
my_nx = nx1
if my_ny < 0:
1 change: 0 additions & 1 deletion pysurfex/platform_deps.py
@@ -288,7 +288,6 @@ def parse_setting(
"""
# Check on arguments
if isinstance(setting, str):

if basedtg is not None:
if isinstance(basedtg, str):
basedtg = as_datetime(basedtg)
2 changes: 0 additions & 2 deletions pysurfex/pseudoobs.py
@@ -171,7 +171,6 @@ def snow_pseudo_obs_cryoclim(
providers = []
logging.debug("p_fg_snow_depth.shape[0]=%s", p_fg_snow_depth.shape[0])
for i in range(0, p_fg_snow_depth.shape[0]):

p_snow_fg = p_fg_snow_depth[i]
logging.debug("%s %s %s %s", i, p_snow_fg, res_lons[i], res_lats[i])
if not np.isnan(p_snow_fg):
@@ -319,7 +318,6 @@ def sm_obs_sentinel(
lafs = []
providers = []
for i in range(0, p_fg_sm.shape[0]):

p_sm_fg = p_fg_sm[i]
if not np.isnan(p_sm_fg):
# Check if in grid
1 change: 0 additions & 1 deletion pysurfex/run.py
@@ -293,7 +293,6 @@ def __init__(
if self.prepfile.input_file is not None and os.path.abspath(
self.prepfile.filename
) != os.path.abspath(self.prepfile.input_file):

logging.info("Input PREP file is: %s", self.prepfile.input_file)
remove_existing_file(self.prepfile.input_file, self.prepfile.filename)
os.symlink(self.prepfile.input_file, self.prepfile.filename)
6 changes: 0 additions & 6 deletions pysurfex/titan.py
@@ -1052,7 +1052,6 @@ def __init__(self, blacklist):
blacklist_stid = {}
if "lons" in blacklist and "lats" in blacklist:
for i in range(0, len(blacklist["lons"])):

if len(blacklist["lons"]) != len(blacklist["lats"]):
raise RuntimeError(
"Blacklist must have the same length for both lons and lats"
@@ -1092,7 +1091,6 @@ def test(self, dataset, mask, code=100):
flags = dataset.flags
for i, lon_val in enumerate(dataset.lons):
if i in mask:

lon = Observation.format_lon(lon_val)
lat = Observation.format_lat(dataset.lats[i])
stid = dataset.stids[i]
@@ -1342,7 +1340,6 @@ def define_quality_control(test_list, settings, an_time, domain_geo=None, blackl
tests.append(Buddy(**kwargs))

elif qct.lower() == "climatology":

if test_options is not None:
opts = ["minval", "maxval", "offset"]
for opt in opts:
@@ -1351,7 +1348,6 @@
tests.append(Climatology(an_time, **kwargs))

elif qct.lower() == "sct":

if test_options is not None:
opts = [
"num_min",
@@ -1687,7 +1683,6 @@ def perform_tests(self):
mask = []
findex = 0
for obs_set in self.datasources:

if obs_set.label == "":
raise RuntimeError(
"Observations set for quality control are "
@@ -2003,7 +1998,6 @@ def merge_json_qc_data_sets(an_time, filenames, qc_flag=None, skip_flags=None):
index_pos = {}
data = {}
for filename in filenames:

if os.path.exists(filename):
with open(filename, mode="r", encoding="utf-8") as file_handler:
data1 = json.load(file_handler)
1 change: 0 additions & 1 deletion tests/smoke/test_cli_fg_titan_oi_obsmon.py
@@ -583,7 +583,6 @@ def _qc_gridpp_obsmon(


def obsmon_test(var, qc_fname, first_guess_file, analysis_file, db_file):

translation = {
"t2m": "air_temperature_2m",
"rh2m": "relative_humidity_2m",