From 98170f1d432b10a91ec994a9830cddf8296007e1 Mon Sep 17 00:00:00 2001 From: Julien Groenenboom Date: Mon, 12 Feb 2024 15:25:27 +0100 Subject: [PATCH] Copied code used in GLOPARTS to 'case_gloparts' and removed GLOPARTS-related parts from main code --- Project.toml | 1 - case_gloparts/Particles.jl/Project.toml | 30 + case_gloparts/Particles.jl/drifters.jl | 402 ++++++++++ case_gloparts/Particles.jl/src/Particles.jl | 50 ++ .../Particles.jl/src/cartesian_grid.jl | 324 ++++++++ case_gloparts/Particles.jl/src/cli.jl | 35 + case_gloparts/Particles.jl/src/cmems_grid.jl | 279 +++++++ case_gloparts/Particles.jl/src/dflow.jl | 450 +++++++++++ .../Particles.jl/src/dflow_his_to_zarr.jl | 441 +++++++++++ .../src/dflow_map_interp_to_zarr.jl | 698 ++++++++++++++++++ case_gloparts/Particles.jl/src/era5.jl | 155 ++++ case_gloparts/Particles.jl/src/grids.jl | 10 + .../Particles.jl/src/matroos_grid.jl | 163 ++++ .../Particles.jl/src/particles_core.jl | 384 ++++++++++ .../Particles.jl/src/unstructured_grid.jl | 566 ++++++++++++++ case_gloparts/Particles.jl/src/wms_client.jl | 158 ++++ case_gloparts/README.txt | 4 + case_gloparts/test/test_cmems_grid.jl | 96 +++ case_gloparts/test/test_dflow.jl | 162 ++++ .../DFM_OUTPUT_cb_2d/cb_2d.dia | 0 .../DFM_OUTPUT_cb_2d/cb_2d_0000.dia | 0 .../DFM_OUTPUT_cb_2d/cb_2d_0000_his.nc | Bin .../DFM_OUTPUT_cb_2d/cb_2d_0000_map.nc | Bin .../DFM_OUTPUT_cb_2d/cb_2d_0001.dia | 0 .../DFM_OUTPUT_cb_2d/cb_2d_0001_map.nc | Bin .../DFM_interpreted_idomain_bendcurv_net.nc | Bin .../01_flow_to_right/Velocity.bc | 0 .../01_flow_to_right/bendcurv_0000_net.nc | Bin .../01_flow_to_right/bendcurv_0001_net.nc | Bin .../01_flow_to_right/bendcurv_net.nc | Bin .../01_flow_to_right/cb3d-1_bnd.ext | 0 .../01_flow_to_right/cb3d-1_obs.xyn | 0 .../01_flow_to_right/cb3d-1_thd.pli | 0 .../01_flow_to_right/cb_2d.mdu | 0 .../01_flow_to_right/cb_2d_0000.cache | Bin .../01_flow_to_right/cb_2d_0000.mdu | 0 .../01_flow_to_right/cb_2d_0001.cache | Bin 
.../01_flow_to_right/cb_2d_0001.mdu | 0 .../01_flow_to_right/dimr_config.xml | 0 .../01_flow_to_right/inner-east.pli | 0 .../01_flow_to_right/inner-south.pli | 0 .../01_flow_to_right/run_parallel.bat | 0 .../DFM_OUTPUT_cb_2d/cb_2d.dia | 0 .../DFM_OUTPUT_cb_2d/cb_2d_0000.dia | 0 .../DFM_OUTPUT_cb_2d/cb_2d_0000_his.nc | Bin .../DFM_OUTPUT_cb_2d/cb_2d_0000_map.nc | Bin .../DFM_OUTPUT_cb_2d/cb_2d_0001.dia | 0 .../DFM_OUTPUT_cb_2d/cb_2d_0001_map.nc | Bin .../DFM_interpreted_idomain_bendcurv_net.nc | Bin .../02_flow_to_left/Velocity.bc | 0 .../02_flow_to_left/bendcurv_0000_net.nc | Bin .../02_flow_to_left/bendcurv_0001_net.nc | Bin .../02_flow_to_left/bendcurv_net.nc | Bin .../02_flow_to_left/cb3d-1_bnd.ext | 0 .../02_flow_to_left/cb3d-1_obs.xyn | 0 .../02_flow_to_left/cb3d-1_thd.pli | 0 .../02_flow_to_left/cb_2d.mdu | 0 .../02_flow_to_left/cb_2d_0000.cache | Bin .../02_flow_to_left/cb_2d_0000.mdu | 0 .../02_flow_to_left/cb_2d_0001.cache | Bin .../02_flow_to_left/cb_2d_0001.mdu | 0 .../02_flow_to_left/dimr_config.xml | 0 .../02_flow_to_left/inner-east.pli | 0 .../02_flow_to_left/inner-south.pli | 0 .../02_flow_to_left/run_parallel.bat | 0 .../02_flow_to_left/unstruc.dia | 0 case_gloparts/workDir/config.toml | 55 ++ case_gloparts/workDir/config_template.toml | 55 ++ case_gloparts/workDir/run_julia.bat | 4 + case_gloparts/workDir/run_julia.sh | 3 + drifters.jl | 44 +- src/cmems_grid.jl | 106 +-- src/dflow.jl | 93 +-- test/test_cmems_grid.jl | 29 - test/test_dflow.jl | 30 - 75 files changed, 4564 insertions(+), 263 deletions(-) create mode 100644 case_gloparts/Particles.jl/Project.toml create mode 100644 case_gloparts/Particles.jl/drifters.jl create mode 100644 case_gloparts/Particles.jl/src/Particles.jl create mode 100644 case_gloparts/Particles.jl/src/cartesian_grid.jl create mode 100644 case_gloparts/Particles.jl/src/cli.jl create mode 100644 case_gloparts/Particles.jl/src/cmems_grid.jl create mode 100644 case_gloparts/Particles.jl/src/dflow.jl create mode 100644 
case_gloparts/Particles.jl/src/dflow_his_to_zarr.jl create mode 100644 case_gloparts/Particles.jl/src/dflow_map_interp_to_zarr.jl create mode 100644 case_gloparts/Particles.jl/src/era5.jl create mode 100644 case_gloparts/Particles.jl/src/grids.jl create mode 100644 case_gloparts/Particles.jl/src/matroos_grid.jl create mode 100644 case_gloparts/Particles.jl/src/particles_core.jl create mode 100644 case_gloparts/Particles.jl/src/unstructured_grid.jl create mode 100644 case_gloparts/Particles.jl/src/wms_client.jl create mode 100644 case_gloparts/README.txt create mode 100644 case_gloparts/test/test_cmems_grid.jl create mode 100644 case_gloparts/test/test_dflow.jl rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d.dia (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000.dia (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000_his.nc (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000_map.nc (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0001.dia (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0001_map.nc (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_interpreted_idomain_bendcurv_net.nc (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/Velocity.bc (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/bendcurv_0000_net.nc (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/bendcurv_0001_net.nc (100%) rename {test_data => 
case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/bendcurv_net.nc (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/cb3d-1_bnd.ext (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/cb3d-1_obs.xyn (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/cb3d-1_thd.pli (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d.mdu (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0000.cache (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0000.mdu (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0001.cache (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0001.mdu (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/dimr_config.xml (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/inner-east.pli (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/inner-south.pli (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/01_flow_to_right/run_parallel.bat (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d.dia (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0000.dia (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0000_his.nc (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0000_map.nc 
(100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0001.dia (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0001_map.nc (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_interpreted_idomain_bendcurv_net.nc (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/Velocity.bc (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/bendcurv_0000_net.nc (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/bendcurv_0001_net.nc (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/bendcurv_net.nc (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/cb3d-1_bnd.ext (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/cb3d-1_obs.xyn (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/cb3d-1_thd.pli (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d.mdu (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0000.cache (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0000.mdu (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0001.cache (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0001.mdu (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/dimr_config.xml (100%) rename {test_data => 
case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/inner-east.pli (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/inner-south.pli (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/run_parallel.bat (100%) rename {test_data => case_gloparts/test_data}/dfm_multiple_runs_and_domains/02_flow_to_left/unstruc.dia (100%) create mode 100644 case_gloparts/workDir/config.toml create mode 100644 case_gloparts/workDir/config_template.toml create mode 100644 case_gloparts/workDir/run_julia.bat create mode 100644 case_gloparts/workDir/run_julia.sh diff --git a/Project.toml b/Project.toml index 5d6f188..299f336 100644 --- a/Project.toml +++ b/Project.toml @@ -12,7 +12,6 @@ DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" FileIO = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549" GZip = "92fee26a-97fe-5a0c-ad85-20a5f3185b63" -Glob = "c27321d9-0574-5035-807b-f59d2c89b15c" HTTP = "cd3eb016-35fb-5094-929b-558a96fad6f3" IJulia = "7073ff75-c697-5162-941a-fcdaad2a7d2a" ImageMagick = "6218d12a-5da1-5696-b52f-db25d2ecc6d1" diff --git a/case_gloparts/Particles.jl/Project.toml b/case_gloparts/Particles.jl/Project.toml new file mode 100644 index 0000000..28f512f --- /dev/null +++ b/case_gloparts/Particles.jl/Project.toml @@ -0,0 +1,30 @@ +name = "Particles" +uuid = "9b8f9cb0-d3d2-11e9-0098-050335538f81" +authors = ["robot144 "] +version = "0.1.1" + +[deps] +ArgParse = "c7e460c6-2fb9-53a9-8c5b-16f535851c63" +Base64 = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" +CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b" +DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" +Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" +FileIO = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549" +GZip = "92fee26a-97fe-5a0c-ad85-20a5f3185b63" +Glob = "c27321d9-0574-5035-807b-f59d2c89b15c" +HTTP = "cd3eb016-35fb-5094-929b-558a96fad6f3" +ImageMagick = 
"6218d12a-5da1-5696-b52f-db25d2ecc6d1" +Interpolations = "a98d9a8b-a2ab-59e6-89dd-64a1c18fca59" +LabelledArrays = "2ee39098-c373-598a-b85f-a56591580800" +NetCDF = "30363a11-5582-574a-97bb-aa9a979735b9" +Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80" +Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7" +Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" +RawArray = "d3d335b2-f152-507c-820e-958e337efb65" +StaticArrays = "90137ffa-7385-5640-81b9-e52037218182" +TOML = "fa267f1f-6049-4f14-aa54-33bafae1ed76" +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +Zarr = "0a941bbe-ad1d-11e8-39d9-ab76183a1d99" + +[compat] +NetCDF = "0.11" diff --git a/case_gloparts/Particles.jl/drifters.jl b/case_gloparts/Particles.jl/drifters.jl new file mode 100644 index 0000000..465c434 --- /dev/null +++ b/case_gloparts/Particles.jl/drifters.jl @@ -0,0 +1,402 @@ +# +# Run simulation for drifter model. +# Usage: julia --project case_drifter_challenge/drifters.jl /path/to/config.toml|csv" +# + +cd("../Particles.jl") +import Pkg +using Pkg + +Pkg.activate(".") +Pkg.instantiate() + +using Particles +using Plots + +#include(joinpath(@__DIR__, "drifterfunctions.jl")) + +# +# read user input +# +usage = "Usage: julia --project=.. 
drifters.jl /path/to/config.toml|csv" +n = length(ARGS) +if n != 1 + throw(ArgumentError(usage)) +end +config_path = only(ARGS) +if !isfile(config_path) + throw(ArgumentError("File not found: $(config_path)\n" * usage)) +else + println("Reading config from file $(config_path).") +end +d = Particles.config(config_path) + +# +# check and initialize +# +if !isa(d["id"], Vector) + d["id"] = [d["id"]] + d["x"] = [d["x"]] + d["y"] = [d["y"]] +end +d["nsources"] = length(d["id"]) +if length(d["x"]) != d["nsources"] + error("Length of x not equal to $(d["nsources"])") +end +if length(d["y"]) != d["nsources"] + error("Length of y not equal to $(d["nsources"])") +end + +if !haskey(d, "coordinates") + d["coordinates"] = "spherical" +end + +# simulation timing +d["time_direction"] = :forwards # :forwards or :backwards +if !haskey(d, "tstart") + d["tstart"] = 0.0 +end +if !haskey(d, "tend") + error("Final time of simulation tend is missing in config.") +end +if !haskey(d, "dt") + error("Time-step in seconds dt is missing in config.") +end +if !haskey(d, "write_maps_interval") + d["write_maps_interval"] = 1800.0 # seconds +end +if !haskey(d, "plot_maps_interval") + d["plot_maps_interval"] = 7200.0 # seconds +end + +# +# flow data from delft3d-fm +# optionally also winds through this route +# + +function zero_fun(x, y, z, t) #zero everywhere + return 0.0 +end + + +# check keywords for flowdata +if !haskey(d, "current_dir") + d["current_dir"] = d["datapath"] +end +current_dir = d["current_dir"] +if haskey(d, "current_filename") + d["current_x_filename"] = d["current_filename"] + d["current_y_filename"] = d["current_filename"] +end +if !haskey(d, "current_x_filename") + error("Missing key: current_x_filename") +end +if !haskey(d, "current_y_filename") + error("Missing key: current_y_filename") +end +if !haskey(d, "current_filetype") + error("Missing key: current_filetype") +end +if !haskey(d,"current_x_var") + d["current_x_var"] = "longitude" +end +if !haskey(d,"current_y_var") + 
d["current_y_var"] = "latitude" +end +if lowercase(d["current_filetype"]) == "cmems" + if !haskey(d,"current_ucx_var") + d["current_ucx_var"] = "uo" + end + if !haskey(d,"current_ucy_var") + d["current_ucy_var"] = "vo" + end +elseif lowercase(d["current_filetype"]) == "delft3d-fm" + if !haskey(d,"current_ucx_var") + d["current_ucx_var"] = "mesh2d_ucx" + end + if !haskey(d,"current_ucy_var") + d["current_ucy_var"] = "mesh2d_ucy" + end +end + +# create u and v functions for flowdata +if lowercase(d["current_filetype"]) == "cmems" + cmems_u = CmemsData(current_dir, d["current_x_filename"]; lon = d["current_x_var"], lat = d["current_y_var"], reftime=d["reftime"], tstart=d["tstart"], tend=d["tend"]) + cmems_v = CmemsData(current_dir, d["current_y_filename"]; lon = d["current_x_var"], lat = d["current_y_var"], reftime=d["reftime"], tstart=d["tstart"], tend=d["tend"]) + t0 = d["reftime"] + u = initialize_interpolation(cmems_u, d["current_ucx_var"], t0, 0.0) # water velocity x-dir + v = initialize_interpolation(cmems_v, d["current_ucy_var"], t0, 0.0) # water velocity y-dir +elseif lowercase(d["current_filetype"]) == "delft3d-fm" + dflow_map = load_nc_info(current_dir, d["current_filename"]; reftime=d["reftime"], tstart=d["tstart"], tend=d["tend"]) + const interp = load_dflow_grid(dflow_map, 50, true) + u = initialize_interpolation(dflow_map, interp, d["current_ucx_var"], d["reftime"], 0.0, d["time_direction"]); + v = initialize_interpolation(dflow_map, interp, d["current_ucy_var"], d["reftime"], 0.0, d["time_direction"]); +elseif lowercase(d["current_filetype"]) == "zero" + u = zero_fun + v = zero_fun +else + error("Invalid current_filtype: $(d["current_filetype"])") +end + +# create u and v functions for SECOND flowdata +if haskey(d, "current2_filetype") + if lowercase(d["current2_filetype"]) == "delft3d-fm" + dflow_map = load_nc_info(d["current2_dir"], d["current2_filename"]; reftime=d["reftime"], tstart=d["tstart"], tend=d["tend"]) + const interp = 
load_dflow_grid(dflow_map, 50, true) + u2 = initialize_interpolation(dflow_map, interp, d["current2_ucx_var"], d["reftime"], 0.0, d["time_direction"]); + v2 = initialize_interpolation(dflow_map, interp, d["current2_ucy_var"], d["reftime"], 0.0, d["time_direction"]); + println("A second flow-field will be used from: $(d["current2_filename"])") + elseif lowercase(d["current2_filetype"]) == "zero" + u2 = zero_fun + v2 = zero_fun + else + error("Invalid current2_filetype (only 'delft3d-fm' is supported): $(d["current2_filetype"])") + end +else + u2 = zero_fun + v2 = zero_fun +end + +# check input for winds +if !haskey(d, "wind_dir") + d["wind_dir"] = d["datapath"] +end +if haskey(d, "wind_filename") + d["wind_x_filename"] = d["wind_filename"] + d["wind_y_filename"] = d["wind_filename"] +end +if !haskey(d, "wind_x_filename") + error("Missing key: wind_x_filename") +end +if !haskey(d, "wind_y_filename") + error("Missing key: wind_y_filename") +end +if !haskey(d, "wind_filetype") + error("Missing key: wind_filetype") +end +if !haskey(d,"wind_x_var") + d["wind_x_var"] = "x" +end +if !haskey(d,"wind_y_var") + d["wind_y_var"] = "y" +end + +# create u_wind and v_wind +if lowercase(d["wind_filetype"]) == "gfs" + # wind data from gfs + gfs_u = GFSData(d["wind_dir"], d["wind_x_filename"]; lon = d["wind_x_var"], lat = d["wind_y_var"], reftime=d["reftime"], tstart=d["tstart"], tend=d["tend"]) + gfs_v = GFSData(d["wind_dir"], d["wind_y_filename"]; lon = d["wind_x_var"], lat = d["wind_y_var"], reftime=d["reftime"], tstart=d["tstart"], tend=d["tend"]) + t0 = d["reftime"] + if !haskey(d,"wind_x_wrap") + if all(d["x"] .>= -180.0 * d["x"] .<= 180.0) && all(d["y"] .>= -90.0 * d["y"] .<= 90.0) && all(gfs_u.grid.xnodes .>= 0.0 * gfs_u.grid.xnodes .<= 360.0) + @warn "The particles seem to be in spherical coordinates (lon: [-180 180]), but the provided GFS data is in lon: [0 360]. 
wind_x_wrap is set to 'true'" + d["wind_x_wrap"] = true # shift longitudes from [0 360] to [-180 180] + else + d["wind_x_wrap"] = false + end + end + u_wind = initialize_interpolation(gfs_u, "10u", t0, NaN, wrap = d["wind_x_wrap"]) # wind velocity x-dir + v_wind = initialize_interpolation(gfs_v, "10v", t0, NaN, wrap = d["wind_x_wrap"]) # wind velocity y-dir +elseif lowercase(d["wind_filetype"]) == "delft3d-fm" + u_wind = initialize_interpolation(dflow_map, interp, "mesh2d_windx", d["reftime"], 0.0, d["time_direction"]) + v_wind = initialize_interpolation(dflow_map, interp, "mesh2d_windy", d["reftime"], 0.0, d["time_direction"]) + error("TODO: make this work.") +elseif lowercase(d["wind_filetype"]) == "zero" + @warn "Running without wind" + u_wind = zero_fun + v_wind = zero_fun +else + error("Invalid wind_filtype: $(d["current_filetype"])") +end + + +# initialize +variables = ["lon", "lat", "age"] +d["variables"] = variables +m = length(variables) +n = d["npartpersource"] * d["nsources"] +p = zeros(m, n) +# ds, s = track_of_drifter!(zeros(3), zeros(3), starttime, t0, 60, drifter) +#p[1,:] .= rand(-1.:0.00001:1., d["nparticles"]) .* d["radius"] .+ d["x"] +#p[2,:] .= rand(-1.:0.00001:1., d["nparticles"]) .* d["radius"] .+ d["y"] +iindex = 1 +for isource = 1:d["nsources"] + xsource = d["x"][isource] + ysource = d["y"][isource] + dx = d["radius"] + dy = dx + for ipart = 1:d["npartpersource"] + global iindex + p[1, iindex] = xsource + dx * randn() + p[2, iindex] = ysource + dy * randn() + iindex += 1 + end +end +d["ids"] = repeat(d["id"], inner = d["npartpersource"]) # repeated ids for each particle +d["nparticles"] = n #NOTE total nr of particles (=nsources*npartpersource) + +d["particles"] = p # initial values + +@info p + +###### Write to netcdf ###### +haskey(d, "write_maps") || (d["write_maps"] = true) +if d["write_maps"] + haskey(d, "write_maps_filename") || (d["write_maps_filename"] = "drifters.nc") # Save data in NetCDF file + haskey(d, "write_maps_dir") || 
(d["write_maps_dir"] = "netcdf_output") + d["write_maps_times"] = collect(d["tstart"]:d["write_maps_interval"]:d["tend"]) # Time at which data should be written to netcdf +end + +###### Plot maps ###### +haskey(d, "plot_maps") || (d["plot_maps"] = true) +if d["plot_maps"] + haskey(d, "plot_maps_folder") || (d["plot_maps_folder"] = "images_drifter") + d["plot_maps_times"] = collect(d["tstart"]:(d["plot_maps_interval"]):d["tend"]) # Time at which plot should be made + # spatial domain for plots + if !haskey(d, "bbox") + dx = maximum(d["x"]) - minimum(d["x"]) + dy = maximum(d["y"]) - minimum(d["y"]) + d["buffer"] = d["radius"] * 100 + d["bbox"] = [minimum(d["x"]) - d["buffer"], minimum(d["y"]) - d["buffer"], maximum(d["x"]) + d["buffer"], maximum(d["y"]) + d["buffer"]] # Where we expect particles + d["plot_maps_size"] = (1500, 1500) # base this on bbox + end +end + +@info "Using the following " d + +########################### Prepare background image ########################### +if d["plot_maps"] + plot_maps_size = d["plot_maps_size"] + width, height = plot_maps_size + plot_bbox = d["bbox"] + if haskey(d, "plot_background_source") + wms_server = WmsServer(d["plot_background_source"]) + else + #wms_server = WmsServer("emodnet-bathymetry") + wms_server = WmsServer("gebco") + end + img = get_map(wms_server, plot_bbox, width, height) + d["background_image"] = img + + function plot_background(d) + plot_bbox = d["bbox"] + img = d["background_image"] + f = plot_image(img, plot_bbox) + f = plot!(xaxis = ("Longitude \n ", (plot_bbox[1], plot_bbox[3]), font(30)), yaxis = (" \n Latitude", (plot_bbox[2], plot_bbox[4]), font(30)), legend = :topleft, legendfont = font(20), titlefontsize = 25) + return (f) + end + d["plot_maps_background"] = plot_background +end + +###### Velocity function for the particles ###### +function f!(ds, s, t, i, d) + x, y, age = s + z = 0.0 + R = 6371.0e3 # Mean radius of earth from wikipedia + deg2rad = pi / 180.0 # Converts degrees to radians + rad2deg 
= 180.0 / pi # Converts radians to degrees + up = 0 + vp = 0 + dt = d["dt"] + uw = 0 + vw = 0 + ua = 0 + va = 0 + uw = u(x, y, z, t) + vw = v(x, y, z, t) + uw += u2(x, y, z, t) + vw += v2(x, y, z, t) + if uw != 0 + ua = u_wind(x, y, z, t) + va = v_wind(x, y, z, t) + up = uw + ua * d["leeway_coeff"] + vp = vw + va * d["leeway_coeff"] + end + + # Various models: + # 0: Use drifer data + # track_of_drifter!(ds,s,t,d["reftime"],d["dt"],drifter) + # 1: Only flow velocities + # up = uw + # vp = vw + # 2: Flow plus a factor times wind + # up = uw + ua * d["leeway_coeff"] + # vp = vw + va * d["leeway_coeff"] + # 3: Add stokes drift to flow velocity + # usJ, vsJ = uv_sJ(wh(x,y,z,t),wp(x,y,z,t),wd(x,y,z,t)) + # up = uw+usJ + # vp = vw+vsJ + # 4: Combine flow, stokes drift and wind in an equilibrium for the particle velocity + # usJ, vsJ = uv_sJ(wh(x,y,z,t),wp(x,y,z,t),wd(x,y,z,t)) + # (up,vp) = water_stokes_wind(ua,va, uw,vw,usJ,vsJ) + # 5: Flow plus flow plus a factor times wind (a second flow field to allow for e.g. CMEMS + GTSM flow) + + # Calculate and add turbulent diffusivity, using Pr=1 + # Estimate the Eddy viscosity and its derivates, using a Smagorinsky model + # This is only because the horizontal diffusion is not in the flow output files + #(K, Kdx, Kdy) = estimate_viscosity_smag(interp, x, y, t, u, v) + # WORKAROUND + K = d["K"] + Kdx = 0.0 + Kdy = 0.0 + # TODO: fix estimate_viscosity_smag for regular grids + if !(uw == vw == ua == va == 0.0) + # https://doi.org/10.1016/j.ocemod.2017.11.008 eq. 27 + up += Kdy + randn() * sqrt(2 * K * dt) / dt + vp += Kdx + randn() * sqrt(2 * K * dt) / dt + end + if d["time_direction"] == :backwards + up *= -1 + vp *= -1 + end + + # Convert the velocity in [m/s] to dlat and dlon for latitude and longitude + ds.x = rad2deg * up / (R * cos(deg2rad * s[2])) + ds.y = rad2deg * vp / R + ds.z = 1.0 +end +d["f"] = f! 
+ +run_simulation(d) + +if d["write_maps"] && haskey(d, "npartpersource") && d["npartpersource"] > 1 + import NetCDF + using NetCDF + import Statistics + using Statistics + nsources = d["nsources"] + npartpersource = d["npartpersource"] + + fullfile = joinpath(d["write_maps_dir"], d["write_maps_filename"]) + file = NetCDF.open(fullfile) + gatts = file.gatts + time = ncread(fullfile, "time") + ntimes = length(time) + time_atts = file["time"].atts + finalize(file) + + fullfile_mean = joinpath(d["write_maps_dir"], "source-averaged_" * d["write_maps_filename"]) + if isfile(fullfile_mean) + println("Source-averaged output file exists. Removing file $(fullfile_mean)") + rm(fullfile_mean) + end + nc = NetCDF.create(fullfile_mean, gatts = gatts, mode = NC_NETCDF4) + + for varname in keys(file.vars) + dimnames = [file[varname].dim[i].name for i = 1:file[varname].ndim] + if "time" in dimnames && "particles" in dimnames + data = ncread(fullfile, varname) + nccreate(fullfile_mean, varname, "time", time, "sources", collect(1:1:nsources)) + data_mean = zeros(ntimes, nsources) + for srci = 1:nsources + ind1 = (srci - 1) * npartpersource + 1 + ind2 = srci * npartpersource + data_mean[:,srci] = mean(data[:,ind1:ind2], dims = 2) #todo: take nanmean or skipmissing to avoid mean([1 2 NaN/missing 5]) to become NaN + end + ncwrite(data_mean, fullfile_mean, varname) + ncputatt(fullfile_mean, varname, file[varname].atts) + end + end + ncputatt(fullfile_mean, "time", time_atts) + finalize(fullfile_mean) +end diff --git a/case_gloparts/Particles.jl/src/Particles.jl b/case_gloparts/Particles.jl/src/Particles.jl new file mode 100644 index 0000000..05d3198 --- /dev/null +++ b/case_gloparts/Particles.jl/src/Particles.jl @@ -0,0 +1,50 @@ +module Particles +# Main modules for Particles.jl + +using NetCDF + +include("unstructured_grid.jl") # support for fast indexing of unstructured grids + +include("grids.jl") # generic grid structure + +include("cartesian_grid.jl") # support for fast indexing of 
cartesian (2D) grids + +include("wms_client.jl") # support for background images downloaded with WMS + +include("dflow.jl") # support for forcing data from Delft3D-FM + +include("era5.jl") # support for ERA5 reanalysis data + +include("matroos_grid.jl") # support for 2d grids in NetCDF from Matroos + +include("cmems_grid.jl") # support for 2d grids in NetCDF from CMEMS + +include("particles_core.jl") # support for background images downloaded with WMS + +include("cli.jl") # support for starting from commandline with TOML or CSV + +# unstructured_grid.jl +export Grid, Interpolator, add_grid!, interpolate, nodes_per_cell, winding_number, find_first_cell, get_values_by_cells!, find_first_cell, find_cells, create_node_tree!, dump, dump_subgrid, dump_array + +# cartesian_grid.jl +export CartesianGrid, dump, in_bbox, find_index, find_index_and_weights, apply_index_and_weights, interpolate, CartesianXYTGrid, get_map_slice, update_cache, weights + +# wms_client.jl +export WmsServer, get_map, plot_image + +# dflow.jl +export load_nc_info, load_dflow_grid, load_nc_var, load_nc_map_slice, find_index, apply_index, get_times, get_reftime, as_DateTime, initialize_interpolation + +# era5.jl +export EraData, load_map_slice, get_reftime, get_times, as_DateTime, initialize_interpolation + +# matroos_grid.jl +export MatroosData # already_exported: load_map_slice, get_reftime, get_times, as_DateTime, initialize_interpolation + +# cmems_grid.jl +export CmemsData, GFSData # already_exported: load_map_slice, get_reftime, get_times, as_DateTime, initialize_interpolation + +# particles_core.jl +export default_userdata, run_simulation, print_times, plot_maps_xy, plot_maps_xz, index + +end # module diff --git a/case_gloparts/Particles.jl/src/cartesian_grid.jl b/case_gloparts/Particles.jl/src/cartesian_grid.jl new file mode 100644 index 0000000..5254304 --- /dev/null +++ b/case_gloparts/Particles.jl/src/cartesian_grid.jl @@ -0,0 +1,324 @@ +# some tools for unstructured grids +# 
+#CartesianGrid: spatial interpolation on regular x-y grid +# function CartesianGrid(xnodes::Array,ynodes::Array,spherical::Bool=true) +# function dump(grid::CartesianGrid) +# function in_bbox(grid::CartesianGrd,xpoint,ypoint) +# function find_index(grid::CartesianGrid,xpoint,ypoint) +# function find_index_and_weights(grid::CartesianGrid,xpoint,ypoint) +# function apply_index_and_weights(xindices,yindices,weights,values,dummy=0.0) +# function interpolate(grid::CartesianGrid,xpoint::Number,ypoint::Number,values,dummy=0.0) +# CartesianXYTGrid: space-time interpolation +# function CartesianXYTGrid(grid::CartesianGrid,times::AbstractVector,values::AbstractArrayi,name::String,missing_value::Number,scaling=1.0,offset=0.0) +# function interpolate(xyt::CartesianXYTGrid,xpoint::Number,ypoint::Number,time::Number,dummy=0.0) +# function get_map_slice(xyt::CartesianXYTGrid,ti:Integer) +# function update_cache(xyt::CartesianXYTGrid,t) +# function weights(xyt::CartesianXYTGrid,t) + + +debuglevel=1 + +if !@isdefined(CartesianGrid) +mutable struct CartesianGrid <: SpaceGrid + xnodes::Array{Float64,1} #x-coordinates of nodes + ynodes::Array{Float64,1} #y-coordinates of nodes + regular::Bool # assume true for now + #derived data + bbox::Array{Float64,1} #bounding box [xmin xmax ymin ymax] + x0::Float64 + dx::Float64 + y0::Float64 + dy::Float64 + function CartesianGrid(xnodes::Array,ynodes::Array,spherical::Bool=true) + bbox=zeros(Float64,4) + bbox[1]=minimum(xnodes) + bbox[2]=maximum(xnodes) + bbox[3]=minimum(ynodes) + bbox[4]=maximum(ynodes) + x0=xnodes[1] + dx=xnodes[2]-xnodes[1] + y0=ynodes[1] + dy=ynodes[2]-ynodes[1] + return new(xnodes,ynodes,true,bbox,x0,dx,y0,dy) + end +end +end #ifdef + +# +# CartesianGrid functions +# +""" + dump(grid) +Print a summary of the grid to screen. 
+""" +function dump(grid::CartesianGrid) + println("bbox= $(grid.bbox)") + print("xnodes=") + display(grid.xnodes') + print("ynodes=") + display(grid.ynodes') +end + +""" + boolean_inbox=in_bbox(grid,xpoint,ypoint) +""" +function in_bbox(grid::CartesianGrid,xpoint,ypoint) + result=( (xpoint>=grid.bbox[1]) && (xpoint<=grid.bbox[2]) + && (ypoint>=grid.bbox[3]) && (ypoint<=grid.bbox[4]) ) + return result +end + +""" + m,n = find_index(grid,xpoint,ypoint) +Find index of point in the grid. The value is truncated downward. +""" +function find_index(grid::CartesianGrid,xpoint,ypoint) + if in_bbox(grid,xpoint,ypoint) + x_index=trunc(Int,((xpoint-grid.x0)/grid.dx))+1 + y_index=trunc(Int,((ypoint-grid.y0)/grid.dy))+1 + return (x_index,y_index) + else + return (-1,-1) + end +end + +""" +xindices,yindices,weights = find_index_and_weights(grid,xpoint,ypoint) +Get indices and weights for linear interpolation. +""" +function find_index_and_weights(grid::CartesianGrid,xpoint,ypoint) + if in_bbox(grid,xpoint,ypoint) + x_rel=(xpoint-grid.x0)/grid.dx + y_rel=(ypoint-grid.y0)/grid.dy + xi=trunc(Int,x_rel)+1 + yi=trunc(Int,y_rel)+1 + wx=x_rel-trunc(x_rel) + wy=y_rel-trunc(y_rel) + xindices=(xi,xi+1,xi+1,xi) + yindices=(yi,yi,yi+1,yi+1) + weights=((1-wx)*(1-wy),wx*(1-wy),wx*wy,(1-wx)*wy) + return (xindices,yindices,weights) + else + return ((-1,-1,-1,-1),(-1,-1,-1,-1),(0.0,0.0,0.0,0.0)) + end +end + +""" +value = apply_index_and_weights(xindiced,yindices,weights,values) +Compute interpolated value by applying the weights. 
"""
function apply_index_and_weights(xindices,yindices,weights,values,dummy=0.0)
    # A negative first index marks an out-of-grid point (see find_index_and_weights).
    if xindices[1]<0
        return dummy
    else
        result=0.0
        for i=1:length(xindices)
            result+=weights[i]*values[xindices[i],yindices[i]]
        end
        return result
    end
end

"""
    value = interpolate(grid,xpoint,ypoint,values,dummy=0.0)
Bi-linear spatial interpolation of array `values` (defined on `grid`) at
point (xpoint,ypoint). Returns `dummy` when the point is outside the grid.
"""
function interpolate(grid::CartesianGrid,xpoint::Number,ypoint::Number,values,dummy=0.0)
    xindices,yindices,weights = find_index_and_weights(grid,xpoint,ypoint)
    return apply_index_and_weights(xindices,yindices,weights,values,dummy)
end


#
# CartesianXYTGrid functions
#

#if !@isdefined(CartesianXYTGrid)
mutable struct CartesianXYTGrid <: SpaceTimeGrid
    grid::CartesianGrid #grids for domain
    times::AbstractVector #available time levels (relative to some reference time)
    values::AbstractArray #source array for interpolation
    name::String #variable label, e.g. "pressure"
    #
    ndims::Int #number of spatial dimensions (2 or 3)
    missing_value::Number #value used for non-valid values
    scaling::Float64 #scale factor applied to raw values
    offset::Float64 #offset added to scaled raw values
    # derived data
    cache::Array{Any,1} #three consecutive time slices kept in memory
    time_cache::Array{Float64,1} #times belonging to the three cached slices
    time_cache_index::Int64 #index into `times` tracking cache position
    cache_direction::Symbol #:forwards or :backwards stepping through time
    #constructor
    """
    xyt=CartesianXYTGrid(grid,times,values,"pressure",missing_value,scaling=1.0,offset=0.0)
    Create an xyt item for space-time interpolation.
    """
    function CartesianXYTGrid(grid::CartesianGrid,times::AbstractVector,values::AbstractArray,name::String,missing_value::Number,scaling=1.0,offset=0.0,cache_direction::Symbol=:forwards)
        (debuglevel>3) && println("initialize CartesianXYTGrid.")
        #keep 3 times in memory
        time_cache=zeros(3)
        cache=Array{Any,1}(undef,3)
        ndims=length(size(values))-1 #time does not count
        # Pre-load the first three time slices, scaled and with missing
        # values replaced by NaN.
        for ti=1:3
            time_cache[ti]=times[ti]
            if ndims==2
                temp=values[:,:,ti]
            elseif ndims==3
                temp=values[:,:,:,ti]
            else
                error("Ndims should be 2 or 3 for a cartesian xyt-grids for now")
            end
            temp_scaled=offset.+scaling.*temp
            temp_scaled[temp.==missing_value].=NaN #use NaN for missing in cache
            cache[ti]=temp_scaled
        end
        time_cache_index=3 #index of last cached field in list of all available times
        (debuglevel>3) && println("Initial cache index=$(time_cache_index) ")
        if cache_direction!=:forwards&&cache_direction!=:backwards
            error("Unexpected symbol for cache_direction, $cache_direction not supported")
        end
        new(grid,times,values,name,ndims,missing_value,scaling,offset,cache,time_cache,time_cache_index,cache_direction)
    end
end
#end #ifdef

"""
slice = get_map_slice(xyt,ti)
Get time slice of dataset referred to in xyt for time index ti.
"""
function get_map_slice(xyt::CartesianXYTGrid,ti::Integer)
    # NOTE(review): ndims other than 2/3 would leave `temp` undefined,
    # but the constructor rejects those values, so this is unreachable.
    if xyt.ndims==2
        temp=xyt.values[:,:,ti]
    elseif xyt.ndims==3
        temp=xyt.values[:,:,:,ti]
    end
    temp_scaled=xyt.offset.+xyt.scaling.*temp
    temp_scaled[temp.==xyt.missing_value].=NaN #use NaN for missing in cache
    return temp_scaled
end

"""
update_cache(xyt,t)
Advance cache to time t. Updating the content of xyt if necessary.
"""
function update_cache(xyt::CartesianXYTGrid,t)
    if (t>=xyt.time_cache[1])&&(t<=xyt.time_cache[3])
        (debuglevel>=2) && println("cache is okay")
    elseif (t>=xyt.time_cache[2])&&(t<=xyt.times[xyt.time_cache_index+1])
        if(t>xyt.times[end])
            # fixed: field is `times`; there is no `times_cache` field
            error("Trying to access beyond last map t=$(t) > $(xyt.times[end])")
        end
        (debuglevel>=2) && println("advance to next time")
        # shift the cache one step forward and append the next slice
        xyt.cache[1]=xyt.cache[2]
        xyt.cache[2]=xyt.cache[3]
        xyt.cache[3]=get_map_slice(xyt,xyt.time_cache_index+1)
        xyt.time_cache[1]=xyt.time_cache[2]
        xyt.time_cache[2]=xyt.time_cache[3]
        xyt.time_cache[3]=xyt.times[xyt.time_cache_index+1]
        xyt.time_cache_index+=1
    else #complete refresh of cache
        if(t>xyt.times[end])
            error("Trying to access beyond last map t=$(t) > $(xyt.times[end])")
        end
        # fixed: reconstructed the before-first-map guard that was garbled
        # to `if(t=2) && ...` (pattern follows update_cache_forwards in dflow.jl)
        if(t<xyt.times[1])
            error("Trying to access before first map t=$(t) < $(xyt.times[1])")
        end
        (debuglevel>=2) && println("refresh cache")
        ti=findfirst(tt->tt>t,xyt.times)
        (debuglevel>=4) && println("ti=$(ti), t=$(t)")
        (debuglevel>=4) && println("$(xyt.times)")
        xyt.time_cache[1]=xyt.times[ti-1]
        xyt.time_cache[2]=xyt.times[ti]
        xyt.time_cache[3]=xyt.times[ti+1]
        xyt.cache[1]=get_map_slice(xyt,ti-1)
        xyt.cache[2]=get_map_slice(xyt,ti)
        xyt.cache[3]=get_map_slice(xyt,ti+1)
        xyt.time_cache_index=ti+1
    end
    (debuglevel>=4) && println("$(xyt.time_cache_index) $(xyt.time_cache[1]) $(xyt.time_cache[2]) $(xyt.time_cache[3]) ")
end

"""
update_cache_backwards(xyt,t)
Advance cache to time t backwards in time. Updating the content of xyt if necessary.
"""
function update_cache_backwards(xyt::CartesianXYTGrid,t)
    if (t>=xyt.time_cache[1])&&(t<=xyt.time_cache[3])
        (debuglevel>=2) && println("cache is okay")
    elseif (t<=xyt.time_cache[2])&&(t>=xyt.times[xyt.time_cache_index-1])
        # fixed: the advance test mirrored the forwards version; stepping
        # backwards requires t at/below time_cache[2] and at/after the
        # previous available time (cf. update_cache_backwards in dflow.jl)
        if(t<xyt.times[1])
            error("Trying to access before first map t=$(t) < $(xyt.times[1])")
        end
        (debuglevel>=2) && println("advance to next time")
        # shift the cache one step backwards and prepend the previous slice
        xyt.cache[3]=xyt.cache[2]
        xyt.cache[2]=xyt.cache[1]
        xyt.cache[1]=get_map_slice(xyt,xyt.time_cache_index-1)
        xyt.time_cache[3]=xyt.time_cache[2]
        xyt.time_cache[2]=xyt.time_cache[1]
        xyt.time_cache[1]=xyt.times[xyt.time_cache_index-1]
        xyt.time_cache_index-=1
    else #complete refresh of cache
        if(t>xyt.times[end])
            error("Trying to access beyond last map t=$(t) > $(xyt.times[end])")
        end
        # fixed: reconstructed the before-first-map guard that was garbled
        # to `if(t=2) && ...`
        if(t<xyt.times[1])
            error("Trying to access before first map t=$(t) < $(xyt.times[1])")
        end
        (debuglevel>=2) && println("refresh cache")
        # NOTE(review): refresh assumes ti>=3 so that times[ti-2] exists —
        # confirm the caller never refreshes near the start of the series.
        ti=findfirst(tt->tt>t,xyt.times)
        (debuglevel>=4) && println("ti=$(ti), t=$(t)")
        (debuglevel>=4) && println("$(xyt.times)")
        xyt.time_cache[1]=xyt.times[ti-2]
        xyt.time_cache[2]=xyt.times[ti-1]
        xyt.time_cache[3]=xyt.times[ti]
        xyt.cache[1]=get_map_slice(xyt,ti-2)
        xyt.cache[2]=get_map_slice(xyt,ti-1)
        xyt.cache[3]=get_map_slice(xyt,ti)
        xyt.time_cache_index=ti-2
    end
    (debuglevel>=4) && println("$(xyt.time_cache_index) $(xyt.time_cache[1]) $(xyt.time_cache[2]) $(xyt.time_cache[3]) ")
end

"""
(w1,w2,w3) = weights(xyt,t)
Compute weights for (linear) time interpolation to time t based on 3 cached times
This function assumes that the cache is up-to-date.
"""
function weights(xyt::CartesianXYTGrid,t)
    # fixed: restore the range check that was garbled to `if txyt.time_cache[3]`
    # (same check as the weights closure in dflow.jl)
    if t<xyt.time_cache[1] || t>xyt.time_cache[3]
        throw(ArgumentError("t outside cached time"))
    end
    if (t>xyt.time_cache[2])
        w=(t-xyt.time_cache[2])/(xyt.time_cache[3]-xyt.time_cache[2])
        return (0.0,(1.0-w),w)
    else
        w=(t-xyt.time_cache[1])/(xyt.time_cache[2]-xyt.time_cache[1])
        return ((1.0-w),w,0.0)
    end
end

"""
value = interpolate(xyt,x,y,t,0.0)
Interpolate in space and time.
+""" +function interpolate(xyt::CartesianXYTGrid,xpoint::Number,ypoint::Number,time::Number,dummy=0.0) + if xyt.cache_direction==:forwards + update_cache(xyt,time) + elseif xyt.cache_direction==:backwards + update_cache_backwards(xyt,time) + end + w=weights(xyt,time) + value=0.0 + if xyt.ndims==2 + for ti=1:3 + value+=w[ti]*interpolate(xyt.grid,xpoint,ypoint,xyt.cache[ti],NaN) + end + elseif xyt.ndims==3 + for ti=1:3 + value+=w[ti]*interpolate(xyt.grid,xpoint,ypoint,dropdims(xyt.cache[ti],dims=3),NaN) + end + end + if isnan(value) + value=dummy + end + return value +end diff --git a/case_gloparts/Particles.jl/src/cli.jl b/case_gloparts/Particles.jl/src/cli.jl new file mode 100644 index 0000000..bdee44a --- /dev/null +++ b/case_gloparts/Particles.jl/src/cli.jl @@ -0,0 +1,35 @@ +using TOML +using CSV + +function run_simulation() + usage = "Usage: julia -e 'using Particles; run_simulation()' '/path/to/config.toml/csv'" + n = length(ARGS) + if n != 1 + throw(ArgumentError(usage)) + end + config_path = only(ARGS) + if !isfile(config_path) + throw(ArgumentError("File not found: $(config_path)\n" * usage)) + end + run_simulation(config_path) +end + + +function run_simulation(path::AbstractString) + d = config(path) + run_simulation(d) +end + +function config(path::AbstractString) + _, ext = splitext(path) + if ext == ".csv" + c = CSV.File(path; normalizenames=true) + length(c) < 1 && error("Empty csv provided.") + config = Dict(string.(keys(c[1])) .=> values(c[1])) + elseif ext == ".toml" + config = TOML.parsefile(path) + else + error("Unknown file format $ext provided.") + end + merge(default_userdata(), config) +end diff --git a/case_gloparts/Particles.jl/src/cmems_grid.jl b/case_gloparts/Particles.jl/src/cmems_grid.jl new file mode 100644 index 0000000..e4f9d0d --- /dev/null +++ b/case_gloparts/Particles.jl/src/cmems_grid.jl @@ -0,0 +1,279 @@ +# Interact with CMEMS netcdf output +# basically these are lon-lat regular NetCDF files. 
+# These routines are tested with data from the copernicus +# climate data store +# For more see: https://marine.copernicus.eu +# +# function CmemsData(path,filename) +# function load_map_slice(data::CmemsData,varname,itime) +# function get_reftime(data::CmemsData) +# function get_times(data::CmemsData,reftime::DateTime) +# function as_DateTime(data::CmemsData,reftime::DateTime,relative_time) +# function initialize_interpolation(data::CmemsData,varname::String,reftime::DateTime,dummy=0.0) + + +using NetCDF +using Dates +using Glob + +debuglevel = 1 #0-nothing, larger more output + +""" +Using the CmemsData struct the file-handling becomes object-oriented. +""" +mutable struct CmemsData + file::NcFile + #derived data + grid::CartesianGrid + """ + Constructor + cmems_data = CmemsData(".", "my_cmems_file.nc") + cmems_datas = CmemsData("data/2022", r"07/CMEMS/my_cmems_file_part.+.nc") # using Regex + cmems_datas = CmemsData("data/2022", "*/CMEMS/my_cmems_file_part*.nc") # using Glob (* = wildcard) + """ + function CmemsData(path, filename; lon = "longitude", lat = "latitude", reftime = DateTime(1950,1,1), tstart = nothing, tend=nothing) + if isnothing(tstart) && isnothing(tend) + if isa(filename, Regex) + filenames = filter(x -> ~isnothing(match(filename, x)), readdir(path)) + filenames = [joinpath(path, filename) for filename = filenames] + elseif occursin("*", filename) + filenames = glob(filename, path) + else + filenames = [joinpath(path, filename)] + end + else + dt_tstart = DateTime(reftime) + Second(tstart) - Day(1) #1-day forecasts (to simulate 12 Sep, you can use data from 6 Sep) + dt_tend = DateTime(reftime) + Second(tend) + Day(1) #+1 day to make sure tend is also used in time range + filenames = [] + for date in dt_tstart:Dates.Day(1):dt_tend + yyyy = Dates.format(date, "yyyy") + mm = Dates.format(date, "mm") + dd = Dates.format(date, "dd") + glob_results = glob("$(yyyy)/$(mm)/cmems/$(dd)/simulated/Import_CMEMS_*/timeseries/$(filename)", path) + # 
glob_results=glob("$(yyyy)/$(mm)/cmems/$(dd)/observed/$(filename)", path) + push!(filenames, glob_results...) + end + println(filenames) + end + + map = [] + for filename = filenames + file = NetCDF.open(filename) + x = collect(file.vars[lon]) + y = collect(file.vars[lat]) + grid = CartesianGrid(x, y, true) + push!(map, new(file, grid)) + end + length(map) != 1 || (map = map[1]) # backward compatibility + return map + end +end + +""" +Using the GFSData struct the file-handling becomes object-oriented. +""" +mutable struct GFSData + file::NcFile + #derived data + grid::CartesianGrid + """ + Constructor + gfs_data = GFSData(".","my_gfs_file.nc") + gfs_datas = GFSData("data/2022", r"07/CMEMS/my_gfs_file_part.+.nc") # using Regex + gfs_datas = GFSData("data/2022", "*/CMEMS/my_gfs_file_part*.nc") # using Glob (* = wildcard) + """ + function GFSData(path, filename; lon = "x", lat = "y", reftime = DateTime(1950,1,1), tstart = 0.0, tend=3155760000.0) + if isnothing(tstart) && isnothing(tend) + if isa(filename, Regex) + filenames = filter(x -> ~isnothing(match(filename, x)), readdir(path)) + filenames = [joinpath(path, filename) for filename = filenames] + elseif occursin("*", filename) + filenames = glob(filename, path) + else + filenames = [joinpath(path, filename)] + end + else + dt_tstart = DateTime(reftime) + Second(tstart) - Day(10) #10-day forecasts (to simulate 12 Sep, you can use data from 6 Sep) + dt_tend = DateTime(reftime) + Second(tend) + Day(1) #+1 day to make sure tend is also used in time range + filenames = [] + for date in dt_tstart:Dates.Day(1):dt_tend + yyyy = Dates.format(date, "yyyy") + mm = Dates.format(date, "mm") + dd = Dates.format(date, "dd") + glob_results = glob("$(yyyy)/$(mm)/gfs/$(dd)/external_forecasts/meteo_*/$(filename)", path) + push!(filenames, glob_results...) 
+ end + println(filenames) + end + + map = [] + for filename = filenames + file = NetCDF.open(filename) + x = collect(file.vars[lon]) + y = collect(file.vars[lat]) + grid = CartesianGrid(x, y, true) + push!(map, new(file, grid)) + end + length(map) != 1 || (map = map[1]) # backward compatibility + return map + end +end + +""" + wind_x = load_map_slice(cmems_data,"u10",1) +Load data for a time-dependent variable for a specific time. +""" +function load_map_slice(data::CmemsData, varname, itime) + ndims = length(size(data.file[varname])) - 1 #only spatial dims + scale_factor = data.file[varname].atts["scale_factor"] + offset = data.file[varname].atts["add_offset"] + dummy = data.file[varname].atts["_FillValue"] + if ndims == 2 + tempvar = data.file[varname][:, :, itime] + if length(size(tempvar)) == 3 # Starting around julia version 1.3 singleton dimensions are dropped automatically when slicing. + tempvar = dropdims(tempvar, dims = 3) + end + var = offset .+ scale_factor .* tempvar + var[tempvar.==dummy] .= NaN + return var + else #TODO default to surface for 3D fields + tempvar = data.file[varname][:, :, 1, itime] + var = offset .+ scale_factor .* tempvar + var[tempvar.==dummy] .= NaN + return var + end +end + +""" + t_ref = get_reftime(cmems_data) +Read the reference time from the attributes of time in the netcdf file. +Times are described by a reference time (DateTime type) and the number of hours +relative to this t_ref. 
+""" +function get_reftime(data::Union{GFSData,CmemsData}) + time_relative = data.file.vars["time"] + units = time_relative.atts["units"] + temp = split(units, "since") + temp2 = split(temp[2], ".") + (debuglevel >= 2) && println("ref date $(temp2[1])") + t0 = DateTime(strip(temp2[1]), "yyyy-mm-dd HH:MM:SS") + dt_seconds = 1.0 + if startswith(temp[1], "seconds") + dt_seconds = 1.0 + elseif startswith(temp[1], "minutes") + dt_seconds = 60.0 + elseif startswith(temp[1], "hours") + dt_seconds = 3600.0 + elseif startswith(temp[1], "days") + dt_seconds = 24.0 * 3600.0 + else + error("Invalid time-step unit in map-file.") + end + seconds_per_day = 24.0 * 3600.0 + if debuglevel > 2 + println("$(dt_seconds), $(t0), $(seconds_per_day), $(time_relative[1])") + end + relative_days_in_seconds = div(dt_seconds * time_relative[1], seconds_per_day) * seconds_per_day + return t0 + Dates.Second(relative_days_in_seconds) +end + +""" + times=get_times(cmems_data,Dates.DateTime(2019,1,1)) + +Get available times in netcdf map files as a range in seconds, eg 0.0:3600.0:7200.0 +The reftime (second arg) is a Dates.DateTime can be used to change the reftime to something +convenient. With reftime=get_reftime(map) you can find the time of the first map, which is often +a nice default. 
+""" +function get_times(data::Union{GFSData,CmemsData}, reftime::DateTime) + time_relative = data.file.vars["time"] + units = time_relative.atts["units"] + temp = split(units, "since") + temp2 = split(temp[2], ".") + println("ref date $(temp2[1])") + t0 = DateTime(strip(temp2[1]), "yyyy-mm-dd HH:MM:SS") + dt_seconds = 1.0 + if startswith(temp[1], "seconds") + dt_seconds = 1.0 + elseif startswith(temp[1], "minutes") + dt_seconds = 60.0 + elseif startswith(temp[1], "hours") + dt_seconds = 3600.0 + elseif startswith(temp[1], "days") + dt_seconds = 24.0 * 3600.0 + else + error("Invalid time-step unit in map-file.") + end + times = [] + for ti = 1:length(time_relative) + push!(times, (0.001 * Dates.value(t0 - reftime)) + time_relative[ti] * dt_seconds) + end + return times +end + +""" + t = as_DateTime(data,reftime,relative_time) +Convert a reftime (DateTime) and relative_time (Float64 with seconds) relative to this. +Returns a DateTime type. +""" +function as_DateTime(data::CmemsData, reftime::DateTime, relative_time) + reftime + Float64(relative_time) * Dates.Second(1) +end + +""" +p = initialize_interpolation(cmems,"msl",t0) +Create an interpolation function p(x,y,z,t) +""" +function initialize_interpolation(data, varname::String, reftime::DateTime, dummy = 0.0, cache_direction::Symbol = :forwards; wrap = false) + !isa(data, CmemsData) || (data = [data]) # to allow for looping over an array of CmemsData + !isa(data, GFSData) || (data = [data]) # to allow for looping over an array of GFSData + + times_per_file = [get_times(data[i], reftime) for i in 1:length(data)] + + function get_xyt(data, data_ind) + times = times_per_file[data_ind] + values = data[data_ind].file.vars[varname] #TODO more checks + missing_value = values.atts["_FillValue"] + if haskey(values.atts, "scale_factor") + scaling = values.atts["scale_factor"] + else + scaling = 1.0 + end + if haskey(values.atts, "add_offset") + offset = values.atts["add_offset"] + else + offset = 0.0 + end + xyt = 
CartesianXYTGrid(data[data_ind].grid, times, values, varname, missing_value, scaling, offset, cache_direction) + return xyt + end + + xyts = Array{Any,1}(undef, length(data)) + function update_xyt(x,y,t) + intime = [(t >= times[1]) && (t <= times[end]) for times in times_per_file] + inspace = [in_bbox(data[i].grid,x,y) for i in 1:length(data)] + data_ind_needed = findlast((intime .* inspace)) + if isnothing(data_ind_needed) + data_ind_needed = 1 # just continue with first map and get an error-message during the interpolation + end + if isassigned(xyts, data_ind_needed) + (debuglevel >= 2) && println("data is already cached") + else # load additional xyt + (debuglevel >= 1) && println("t = $(t) - Reading data from file: ../$(joinpath(splitpath(data[data_ind_needed].file.name)[end-2:end]...))") + xyts[data_ind_needed] = get_xyt(data, data_ind_needed) + end + xyt = xyts[data_ind_needed] + return xyt + end + + function f(x, y, z, t) + # GFS Grid is 0:360 instead of -180:180 + if wrap && (x < 0) + x += 360 + end + xyt = update_xyt(x,y,t) + value = interpolate(xyt, x, y, t, dummy) + return value + end + return f +end diff --git a/case_gloparts/Particles.jl/src/dflow.jl b/case_gloparts/Particles.jl/src/dflow.jl new file mode 100644 index 0000000..82256b7 --- /dev/null +++ b/case_gloparts/Particles.jl/src/dflow.jl @@ -0,0 +1,450 @@ +# Interact with dflow netcdf output map files +# + +using NetCDF +using Dates +using Glob +# include("unstructured_grid.jl") + +debuglevel = 1 # 0-nothing, larger more output + + +""" + map=load_nc_info(path,filename) +Load meta-data of all map-files, i.e. for all domains in the filename. 
+""" +function load_nc_info(path, filename; reftime = DateTime(1950,1,1), tstart = nothing, tend=nothing) + if isnothing(tstart) && isnothing(tend) + if isa(filename, Regex) # filename used to be a Regex-type (filename_regex::Regex) + filenames = filter(x -> ~isnothing(match(filename, x)), readdir(path)) + filenames = [joinpath(path, filename) for filename = filenames] + elseif occursin("*", filename) + filenames = glob(filename, path) + else + filenames = [joinpath(path, filename)] + end + else + dt_tstart = DateTime(reftime) + Second(tstart) + dt_tend = DateTime(reftime) + Second(tend) + filenames = [] + for date in dt_tstart:Dates.Day(1):dt_tend + yyyy = Dates.format(date, "yyyy") + mm = Dates.format(date, "mm") + dd = Dates.format(date, "dd") + glob_results = glob("$(yyyy)/$(mm)/gtsm/$(dd)/simulated/WF_DflowFM_gtsm4_1.25eu_surge_*/timeseries/$(filename)", path) + push!(filenames, glob_results...) + end + end + + map = [] + for filename = filenames + @info filename + push!(map, NetCDF.open(filename)) + end + return map +end + +""" + interp=load_dflow_grid(map,nmin=50,spherical=true) +Create spatial index for a dflow netcdf map file. +map should contain an array of netcdf info's as created eg by: +load_nc_info. 
+""" +function load_dflow_grid(map, nmin=50, spherical=true) + interp = Interpolator() + println("compute index:") + map_names = [] + for i = 1:length(map) + # only load the grid with a certain filename once + map_name = splitpath(map[i].name)[end] + if in(map_name, map_names) + break + else + push!(map_names, map_name) + end + if haskey(map[i].vars, "NetElemNode") # Old FM variable name + edges_temp = map[i].vars["NetElemNode"][:,:] + elseif haskey(map[i].vars, "mesh2d_face_nodes") # New FM variable name + edges_temp = map[i].vars["mesh2d_face_nodes"][:,:] + elseif haskey(map[i].vars, "Mesh_face_nodes") # FEWS variable name + edges_temp = map[i].vars["Mesh_face_nodes"][:,:] + else + error("Variable 'mesh2d_face_nodes' (or similar) is missing in D-Flow FM file") + end + if haskey(map[i].vars, "NetNode_x") # Old FM variable names + xnodes_temp = map[i].vars["NetNode_x"][:] + ynodes_temp = map[i].vars["NetNode_y"][:] + elseif haskey(map[i].vars, "mesh2d_node_x") # New FM variable names + xnodes_temp = map[i].vars["mesh2d_node_x"][:] + ynodes_temp = map[i].vars["mesh2d_node_y"][:] + elseif haskey(map[i].vars, "Mesh_node_x") # FEWS variable name + xnodes_temp = map[i].vars["Mesh_node_x"][:] + ynodes_temp = map[i].vars["Mesh_node_y"][:] + else + error("Variable 'mesh2d_node_x' (or similar) is missing in D-Flow FM file") + end + println("- $(map[i].name)") + grid_temp = Grid(xnodes_temp, ynodes_temp, edges_temp, nmin, spherical) + # dump(grid_temp) + add_grid!(interp, grid_temp) + end + return interp +end + +""" + depth = load_nc_var(map,"depth") +Load data of a variable foreach domain. The variable should be be time-independent. +""" +function load_nc_var(map, varname) + result = [] + for i = 1:length(map) + push!(result, map[i][varname][:]) + end + return result +end + +""" + waterlevel = load_nc_map_slice(map,"s1",1) +Load data for a time-dependent variable for a specific time and for all domains. 
"""
function load_nc_map_slice(map, varname, itime, sigma_layer_index=-1, domainnr = 0)
    result = []
    # Default: load the slice for every domain in `map`.
    domainnr != 0 || (domainnr = 1:length(map))
    for i in domainnr
        var = map[i][varname]
        # Unpack packed NetCDF values per domain. The original code only
        # applied scale_factor when `i == domainnr`, which never matches
        # when domainnr defaults to a range — so the all-domains call
        # silently returned unscaled data.
        if haskey(var.atts, "scale_factor")
            scaling = var.atts["scale_factor"]
        else
            scaling = 1.0
        end
        if haskey(var.atts, "add_offset") && (var.atts["add_offset"] > 0)
            error("Fix is needed for add_offset > 0.")
        end
        if ndims(var) == 2
            slice = var[:, itime]
        elseif ndims(var) == 3
            if sigma_layer_index > 0
                slice = var[sigma_layer_index, :, itime]
            else
                # For now only the upper sigma layer
                slice = var[end, :, itime]
            end
        else
            throw(ArgumentError("load_nc_map_slice only supports 2/3 dimensions"))
        end
        push!(result, scaling .* slice)
    end
    return result
end


"""
    times=get_times(map,Dates.DateTime(2019,1,1))

Get available times in netcdf map files as a range in seconds, eg 0.0:3600.0:7200.0
The reftime (second arg) is a Dates.DateTime can be used to change the reftime to something
convenient. With reftime=get_reftime(map) you can find the time of the first map, which is often
a nice default.
+""" +function get_times(map, reftime::DateTime) + time_relative = map[1].vars["time"] + units = time_relative.atts["units"] + temp = split(units, "since") + if endswith(temp[2],".0 +0000") + t0 = DateTime(strip(temp[2]), "yyyy-mm-dd HH:MM:SS.0 +0000") # format used in FEWS + else + t0 = DateTime(strip(temp[2]), "yyyy-mm-dd HH:MM:SS +00:00") + end + dt_seconds = 1.0 + if startswith(temp[1], "seconds") + dt_seconds = 1.0 + elseif startswith(temp[1], "minutes") + dt_seconds = 60.0 + elseif startswith(temp[1], "hours") + dt_seconds = 3600.0 + elseif startswith(temp[1], "days") + dt_seconds = 24.0 * 3600.0 + else + error("Invalid time-step unit in map-file.") + end + # if ((time_relative[2]-time_relative[1])>(1.1*(time_relative[3]-time_relative[2]))) + # #check for dummy initial field + # t_start = (0.001*Dates.value(t0-reftime))+time_relative[2]*dt_seconds + # t_step = (time_relative[3]-time_relative[2])*dt_seconds + # t_stop = (0.001*Dates.value(t0-reftime))+time_relative[end]*dt_seconds + # times = t_start:t_step:t_stop + # else + # t_start = (0.001*Dates.value(t0-reftime))+time_relative[1]*dt_seconds + # t_step = (time_relative[2]-time_relative[1])*dt_seconds + # t_stop = (0.001*Dates.value(t0-reftime))+time_relative[end]*dt_seconds + # times = t_start:t_step:t_stop + # end + times = [] + for ti = 1:length(time_relative) + push!(times, (0.001 * Dates.value(t0 - reftime)) + time_relative[ti] * dt_seconds) + end + return times +end + +""" + t_ref = get_reftime(map) +Read the reference time from the attributes of time in the netcdf file. +Times are described by a reference time (DateTime type) and the number of hours +relative to this t_ref. 
Note that this function returns the first timestamp in the file (which
   is not always the same as the RefDate from the .mdu or the
   "since text" in the time-attribute of the netcdf file)
Suggestion: use get_reftime to get the reference date and also use
   get_reftime within get_times
"""
function get_reftime(map)
    time_relative = map[1].vars["time"]
    units = time_relative.atts["units"]
    # units look like "<unit> since <date>", e.g. "seconds since 2019-01-01 00:00:00"
    temp = split(units, "since")
    if endswith(temp[2], "0 +00:00")
        t0 = DateTime(strip(temp[2]), "yyyy-mm-dd HH:MM:SS +00:00")
    else
        t0 = DateTime(strip(temp[2]), "yyyy-mm-dd HH:MM:SS")
    end
    dt_seconds = 1.0
    if startswith(temp[1], "seconds")
        dt_seconds = 1.0
    elseif startswith(temp[1], "minutes") # fixed: was `startwith` (UndefVarError at runtime)
        dt_seconds = 60.0
    elseif startswith(temp[1], "hours")   # fixed: was `startwith`
        dt_seconds = 3600.0
    elseif startswith(temp[1], "days")    # fixed: was `startwith`
        dt_seconds = 24.0 * 3600.0
    else
        error("Invalid time-step unit in map-file.")
    end
    seconds_per_day = 24.0 * 3600.0
    # println("$(dt_seconds), $(t0), $(seconds_per_day), $(time_relative[1])")
    # Round the offset of the first timestamp down to whole days.
    relative_days_in_seconds = div(dt_seconds * time_relative[1], seconds_per_day) * seconds_per_day
    return t0 + Dates.Second(relative_days_in_seconds)
end

"""
    t = as_DateTime(reftime,relative_time)
Convert a reftime (DateTime) and relative_time (Float64 with seconds) relative to this.
Returns a DateTime type.
"""
function as_DateTime(reftime::DateTime, relative_time)
    reftime + Float64(relative_time) * Dates.Second(1)
end

"""
    u,v=initialize_interpolation(dflow_map,interp::Interpolator,varname,reftime::DateTime,dumval=0.0,cache_direction::Symbol=:forwards)

Create interpolation functions for x and y (lon and lat) directions of varname in dflow_map.
The reftime can be chosen freely independent of the reftime in the input files.
+""" + +function initialize_interpolation(dflow_map, interp::Interpolator, varname, reftime::DateTime, dumval=0.0, cache_direction::Symbol=:forwards) + println("initialize caching for $(dflow_map[1].name) $varname...") + # map_cache=dflow_map + # interp_cache=interp + # dumval_cache=dumval + # reftime_cache=reftime + times_cache = get_times(dflow_map, reftime) + no_domains = length(interp.grids) + multiple_runs = length(dflow_map) > no_domains # multiple runs per domain available + if multiple_runs + times_cache_per_run = [get_times([dflow_map[i]], reftime) for i in 1:no_domains:length(dflow_map)] + dflow_map_all = dflow_map + dflow_map_ind = 1:no_domains + dflow_map = dflow_map[dflow_map_ind] + end + + # keep 3 times in memory + time_cache = zeros(3) + time_cache_all_domains = zeros(3,no_domains) + var_cache = Array{Any,1}(undef, 3) + initialized = false + if !haskey(dflow_map[1].vars, varname) + varname_stripped = replace(varname, "mesh2d_" => "") # e.g. mesh2d_ucx => ucx + if haskey(dflow_map[1].vars, varname_stripped) + println("Renaming varname to $varname_stripped as $varname is not a variable in the map, but $varname_stripped is.") + varname = varname_stripped + else + throw(ArgumentError("$varname is not a variable in the map")) + end + end + time_cache_all_domains = repeat(times_cache[1:3], 1, no_domains) + time_cache_index_all_domains = repeat([3], no_domains) # index of last cached field in list of all available times + for ti = 1:3 + var_cache[ti] = load_nc_map_slice(dflow_map, varname, ti) + end + (debuglevel > 4) && println("Initial cache index=$(time_cache_index_all_domains[1]) ") + """ + update_cache_forwards(t) + Refresh the cached fields in the forwards time directions if needed. 
+ """ + function update_cache_forwards(t, domainnr) + time_cache = time_cache_all_domains[:, domainnr] + time_cache_index = time_cache_index_all_domains[domainnr] + + # First, update dflow_map and refresh cache + if multiple_runs + run_ind_needed = findlast([(t >= times[1]) && (t <= times[end]) for times in times_cache_per_run]) + if !isnothing(run_ind_needed) + dflow_map_ind_needed = ((run_ind_needed-1)*no_domains+1) : (run_ind_needed*no_domains) + if dflow_map_ind_needed != dflow_map_ind # update of dflow_map needed (e.g. using run02/run_0000_map.nc instead of run01/run_0000_map.nc) + (debuglevel >= 1) && println("t = $(t) - Update '$(varname)'-data using: ../$(joinpath(splitpath(dflow_map_all[dflow_map_ind_needed[domainnr]].name)[end-2:end]...))") + dflow_map_ind = dflow_map_ind_needed + dflow_map = dflow_map_all[dflow_map_ind] + times_cache = times_cache_per_run[run_ind_needed] + for ti = 1:3 + time_cache[ti] = times_cache[ti] + var_cache[ti][domainnr] = load_nc_map_slice(dflow_map, varname, ti, -1, domainnr)[1] + end + time_cache_index = 3 # index of last cached field in list of all available times + end + end + end + + # Next, update cache of this dflow_map + if (t >= time_cache[1]) && (t <= time_cache[3]) + (debuglevel >= 2) && println("cache is okay") + elseif t > times_cache[end] + error("Trying to access beyond last map t=$(t) > $(times_cache[end])") + elseif (t >= time_cache[2]) && (t <= times_cache[time_cache_index + 1]) + (debuglevel >= 2) && println("advance to next time") + time_cache[1] = time_cache[2] + time_cache[2] = time_cache[3] + time_cache[3] = times_cache[time_cache_index + 1] + var_cache[1][domainnr] = var_cache[2][domainnr] + var_cache[2][domainnr] = var_cache[3][domainnr] + var_cache[3][domainnr] = load_nc_map_slice(dflow_map, varname, time_cache_index + 1, -1, domainnr)[1] + time_cache_index += 1 + elseif t < times_cache[1] + error("Trying to access before first map t=$(t) < $(times_cache[1])") + else # complete refresh of cache + 
(debuglevel >= 2) && println("refresh cache") + ti = findfirst(tt -> tt > t, times_cache) + (ti != length(times_cache)) || (ti = ti - 1) + (ti != 1) || (ti = ti + 1) + (debuglevel >= 4) && println("ti=$(ti), t=$(t)") + (debuglevel >= 4) && println("$(times_cache)") + time_cache[1] = times_cache[ti - 1] + time_cache[2] = times_cache[ti] + time_cache[3] = times_cache[ti + 1] + var_cache[1][domainnr] = load_nc_map_slice(dflow_map, varname, ti - 1, -1, domainnr)[1] + var_cache[2][domainnr] = load_nc_map_slice(dflow_map, varname, ti , -1, domainnr)[1] + var_cache[3][domainnr] = load_nc_map_slice(dflow_map, varname, ti + 1, -1, domainnr)[1] + time_cache_index = ti + 1 + end + (debuglevel >= 4) && println("$(time_cache_index) $(time_cache[1]) $(time_cache[2]) $(time_cache[3]) ") + + time_cache_all_domains[:, domainnr] = time_cache + time_cache_index_all_domains[domainnr] = time_cache_index + end + + """ + update_cache_backwards(t) + Refresh the cached fields in the backwards time direction if needed. 
+ """ + function update_cache_backwards(t) + if (t >= time_cache[1]) && (t <= time_cache[3]) + (debuglevel >= 2) && println("cache is okay") + elseif t < times_cache[1] + error("Trying to access before first map t=$(t) < $(times_cache[1])") + elseif (t <= time_cache[2]) && (t >= times_cache[time_cache_index - 1]) + (debuglevel >= 2) && println("advance to next time") + time_cache[3] = time_cache[2] + time_cache[2] = time_cache[1] + time_cache[1] = times_cache[time_cache_index - 1] + var_cache[3] = var_cache[2] + var_cache[2] = var_cache[1] + var_cache[1] = load_nc_map_slice(dflow_map, varname, time_cache_index - 1) + time_cache_index -= 1 + elseif t > times_cache[end] + error("Trying to access beyond last map t=$(t) > $(times_cache[end])") + else # complete refresh of cache + (debuglevel >= 2) && println("refresh cache") + ti = findfirst(tt -> tt > t, times_cache) + (debuglevel >= 4) && println("ti=$(ti), t=$(t)") + (debuglevel >= 4) && println("$(times_cache)") + time_cache[1] = times_cache[ti - 2] + time_cache[2] = times_cache[ti - 1] + time_cache[3] = times_cache[ti] + var_cache[1] = load_nc_map_slice(dflow_map, varname, ti - 2) + var_cache[2] = load_nc_map_slice(dflow_map, varname, ti - 1) + var_cache[3] = load_nc_map_slice(dflow_map, varname, ti) + time_cache_index = ti - 2 + end + (debuglevel >= 4) && println("$(time_cache_index) $(time_cache[1]) $(time_cache[2]) $(time_cache[3]) ") + initialized = true + end + + """ + (w1,w2,w3) = weights(t) + Compute weights for (linear) time interpolation to time t based on 3 cached times + This function assumes that the cache is up-to-date. 
+ """ + function weights(t) + if t < time_cache[1] || t > time_cache[3] + throw(ArgumentError("t outside cached time")) + end + if (t > time_cache[2]) + w = (t - time_cache[2]) / (time_cache[3] - time_cache[2]) + return (0.0, (1.0 - w), w) + else + w = (t - time_cache[1]) / (time_cache[2] - time_cache[1]) + return ((1.0 - w), w, 0.0) + end + end + # flow in x direction (for now has to be called u) + function f(x, y, z, t) + ind = find_index(interp, x, y) + if ind[1] == -1; return dumval; end + if cache_direction == :forwards + update_cache_forwards(t, ind[1]) + elseif cache_direction == :backwards + update_cache_backwards(t) + end + w = weights(t) + (debuglevel > 3) && println("weights $(weights)") + value = 0.0 + for ti = 1:3 + temp = apply_index(ind, var_cache[ti], dumval) + value += w[ti] * temp + (debuglevel > 4) && println(" add w*val = $(w[ti]) $(temp)") + end + if abs(value) > 100.0 && debuglevel >= 2 + println("$varname interpolation") + println("w $(weights) : $(ind)") + end + return value + end + return f +end + +""" + u,v=initialize_interpolation(dflow_map,interp::Interpolator,reftime::DateTime,dumval=0.0) + +Create interpolation functions for x and y (lon and lat) directions of the flow. +The reftime can be chosen freely independent of the reftime in the input files. 
+""" +function initialize_interpolation(dflow_map, interp::Interpolator, reftime::DateTime, dumval=0.0) + ucx = "ucx" + ucy = "ucy" + if haskey(dflow_map[1].vars, "mesh2d_ucx") + ucx = "mesh2d_ucx" + ucy = "mesh2d_ucy" + end + u = initialize_interpolation(dflow_map, interp, ucx, reftime, dumval) + v = initialize_interpolation(dflow_map, interp, ucy, reftime, dumval) + return u, v +end diff --git a/case_gloparts/Particles.jl/src/dflow_his_to_zarr.jl b/case_gloparts/Particles.jl/src/dflow_his_to_zarr.jl new file mode 100644 index 0000000..8b53ecd --- /dev/null +++ b/case_gloparts/Particles.jl/src/dflow_his_to_zarr.jl @@ -0,0 +1,441 @@ +# +# Convert delft3d time-series to zarr format +# This add selection, compression and chuning into separate files per chunk +# +using TOML +using NetCDF +using Zarr +using Dates + +debuglevel=1 + +# +# defaults +# +try_vars = ["waterlevel","salinity","temperature","x_velocity","y_velocity","vicww"] #variables to look for in hisfile +defaults = Dict( + "waterlevel" => Dict( + "scale_factor" => 0.001, + "add_offset" => 0.0, + "data_type" => "Int16", + "_FillValue" => 9999 ), + "salinity" => Dict( + "scale_factor" => 0.01, + "add_offset" => 0.0, + "data_type" => "Int16", + "_FillValue" => 9999 ), + "temperature" => Dict( + "scale_factor" => 0.01, + "add_offset" => 0.0, + "data_type" => "Int16", + "_FillValue" => 9999 ), + "x_velocity" => Dict( + "scale_factor" => 0.01, + "add_offset" => 0.0, + "data_type" => "Int16", + "_FillValue" => 9999 ), + "y_velocity" => Dict( + "scale_factor" => 0.01, + "add_offset" => 0.0, + "data_type" => "Int16", + "_FillValue" => 9999 ), + "vicww" => Dict( + "scale_factor" => 1e-6, + "add_offset" => 0.0, + "data_type" => "Int16", + "_FillValue" => 9999 ), + "time" => Dict( + "scale_factor" => 1.0, + "add_offset" => 0.0, + "data_type" => "Float64", + "_FillValue" => -9999.0), + "station_x_coordinate" => Dict( + "scale_factor" => 1.0, + "add_offset" => 0.0, + "data_type" => "Float64", + "_FillValue" => -9999.0), 
+ "station_y_coordinate" => Dict( + "scale_factor" => 1.0, + "add_offset" => 0.0, + "data_type" => "Float64", + "_FillValue" => -9999.0), + "zcoordinate_c" => Dict( + "scale_factor" => 0.01, + "add_offset" => 0.0, + "data_type" => "Int32", + "_FillValue" => -9999.0), + "zcoordinate_w" => Dict( + "scale_factor" => 0.01, + "add_offset" => 0.0, + "data_type" => "Int32", + "_FillValue" => -9999.0) + ) +chunk_target_size=1000000 +# +# supporting functions +# +typeconv = Dict{String,DataType}( #String to DataType conversion + "Int32" => Int32, + "Int16" => Int16, + "Int8" => Int8, + "Float32" => Float32, + "Float64" => Float64 +) + +""" +function default_config(hisfile::String) + Example: conf=default_config("myrun_his.nc") +""" +function default_config(hisfile::String) + config=Dict{String,Any}() + globals=Dict{String,Any}() + globals["history_file"]=hisfile + zarrname=replace(hisfile, ".nc" => ".zarr") + zarrname=replace(zarrname, r".*/" => s"") + globals["zarr_file"]=zarrname + globals["chunk_target_size"]=chunk_target_size + config["global"]=globals + nc=NetCDF.open(hisfile) + ncvars=keys(nc) + for varname in try_vars + if varname in ncvars + varconfig=Dict{String,Any}( + "scale_factor" => defaults[varname]["scale_factor"], + "add_offset" => defaults[varname]["add_offset"], + "data_type" => defaults[varname]["data_type"], + ) + config[varname]=varconfig + end + end + finalize(nc) + return config +end + +""" +function range_in_chunks(total_range::OrdinalRange, chunksize::Int) +Divide a range line 1:2:10 into chunks, eg [1:2:5,7:2:9] +This is often usefull to make a loop where the computations are performed per chunk. +""" +function range_in_chunks(total_range::OrdinalRange, chunksize::Int) + chunks=Vector{StepRange}() + n=length(total_range) + n_chunks=max(ceil(Int64,n/chunksize),1) + println("#chunks $(n_chunks) chunk size $(chunksize)") + for i=1:n_chunks + istart=total_range[(i-1)*chunksize+1] + istep=((total_range isa UnitRange) ? 
1 : total_range.step) + if intarget + nblocks=div(n,ntarget) + nblocks=max(1,nblocks) + blocklen=div(varsize[blockpref],nblocks) + blocklen=max(blocklen,1) + blocksize[blockpref]=blocklen + end + return tuple(blocksize...) + end + +""" +function varlist(config::Dict{String,Any}) + vars=varlist(config) +""" +function varlist(config::Dict{String,Any}) + vars=Vector{String}() + for varname in keys(config) + if !startswith(lowercase(varname),r"global") + push!(vars,varname) + end + end + return vars +end + +""" +function vardims(var::NcVar) + example: mydims = vardims(nc.vars["waterlevel"]) +""" +function vardims(var::NcVar) + ndims=var.ndim + return reverse([var.dim[i].name for i in 1:ndims]) +end + +""" +function copy_var(input::NcFile,output,varname,config) +""" +function copy_var(input::NcFile,output,varname,config) + #input + in_var=input.vars[varname] + in_atts=in_var.atts + in_type=typeof(in_var).parameters[1] + in_dummy=get(in_atts,"_FillValue",9999.0) + in_size=size(in_var) + in_rank=length(in_size) + #output + if !haskey(config,varname) #create empty one if absent + config[varname]=Dict{String,Any}() + end + out_offset=get(config[varname],"add_offset",defaults[varname]["add_offset"]) + out_scale=get(config[varname],"scale_factor",defaults[varname]["scale_factor"]) + out_dummy=get(config[varname],"_FillValue",defaults[varname]["_FillValue"]) + out_type_str=get(config[varname],"data_type",defaults[varname]["data_type"]) + out_type=typeconv[out_type_str] + out_chunk_target_size=get(config["global"],"chunk_target_size",chunk_target_size) + out_chunk_size=default_his_chunksize(in_size,out_chunk_target_size) + out_atts=copy(in_atts) + out_atts["scale_factor"]=out_scale + out_atts["add_offset"]=out_offset + out_atts["_ARRAY_DIMENSIONS"]=vardims(in_var) + ###DEBUG MVL + println("varname= $(varname)") + println("in_dummy = $(in_dummy)") + println("out_dummy= $(out_dummy)") + #create output ar + out_var = zcreate(out_type, output, varname,in_size...,attrs=out_atts, 
chunks = out_chunk_size) + println("in_size= $(in_size)") + println("out_size= $(size(out_var))") + #copy content + if in_rank==1 + #in one go + in_temp=in_var[:] + if out_type<:Int + in_temp[in_temp.==in_dummy].=out_offset + out_temp=round.(out_type,(in_temp.-out_offset)./out_scale) + out_temp[in_temp.==in_dummy].=out_dummy + out_var[:].=out_temp[:] + else + out_temp=(in_temp.-out_offset)./out_scale + out_temp[in_temp.==in_dummy].=out_dummy + out_var[:].=out_temp[:] + end + elseif in_rank==2 + println("out_chunk_size = $(out_chunk_size)") + if prod(in_size)==prod(out_chunk_size) + #in one go + in_temp=in_var[:,:] + in_temp[in_temp.==in_dummy].=out_offset + out_temp=round.(out_type,(in_temp.-out_offset)./out_scale) + out_temp[in_temp.==in_dummy].=out_dummy + out_var[:,:].=out_temp[:,:] + else #multiple blocks in time + nblocks=max(div(prod(in_size),prod(out_chunk_size)),1) + dimlen=in_size[2] + blockstep=max(1,div(dimlen,nblocks)) + ifirst=1 + while ifirst") + bool_dummy=in_temp.==in_dummy + bool_nan=isnan.(in_temp) + in_temp[bool_dummy].=out_offset + in_temp[bool_nan].=out_offset + out_temp=round.(out_type,(in_temp.-out_offset)./out_scale) + out_temp[bool_dummy].=out_dummy + out_temp[bool_nan].=out_dummy + print("") + ifirst=ilast + end + end + else + error("Wrong rank ($(in_rank)) for variable $(varname)") + end +end + +""" +function copy_strings(input::NcFile,output,varname,config) +""" +function copy_strings(input::NcFile,output,varname,config) + #input + in_var=input.vars[varname] + in_atts=in_var.atts + in_type=typeof(in_var).parameters[1] + if in_type!=NetCDF.ASCIIChar + error("Can only copy character arrays to Strings, varname = $(varname)") + end + in_size=size(in_var) + in_rank=length(in_size) + if in_rank!=2 + error("Only 2d character arrays are supported for now, varname = $(varname)") + end + in_stations=in_size[2] + in_strlen=in_size[1] + println("in_stations= $(in_stations)") + #output + out_type=Zarr.MaxLengthString{in_strlen,UInt8} + 
out_atts=copy(in_atts) + out_atts["_ARRAY_DIMENSIONS"]=[vardims(in_var)[1]] + #create output ar + out_var = zcreate(out_type, output, varname,in_stations,attrs=out_atts) + #copy content + #in one go + in_temp=in_var[:,:] + out_temp=replace.(nc_char2string(in_temp),r"\s+" => "") + println("out_temp= $(out_temp)") + out_var[:].=out_temp[:] +end + +""" +function has_z(input::NcFile,vars::Vector) +Report if any of the variables has a z coordinate. +We now take a shortcut and check if the variable is a 3d array. +""" +function has_z(input::NcFile,vars::Vector) + result=false + for varname in vars + r=length(size(input.vars[varname])) + if r==3 + result=true + end + end + return result +end + +# +# main function +# +function main(args) + # read user input + n = length(args) + if n < 1 + println("Usage: julia dflow_his_to_zarr.jl config.toml") + println("This command will generate a config file:") + println("julia dflow_his_to_zarr.jl myrun_his.nc") + elseif endswith(lowercase(first(args)),r".toml") + configfile=first(args) + if !isfile(configfile) + throw(ArgumentError("File not found: $(configfile)\n")) + end + # load configfile + config=TOML.parsefile(configfile) + if !haskey(config,"global") + error("Missing [global] group in config file") + end + globals=config["global"] + if !haskey(globals,"history_file") + error("Missing keyword history_file in group [global] in config file") + end + hisfile=globals["history_file"] + if !isfile(hisfile) + error("Cannot find file: $(hisfile)") + end + his=NetCDF.open(hisfile) + vars=varlist(config) + if !endswith(lowercase(hisfile),r"_his.nc") + error("History file $(hisfile) is not like _his.nc") + end + outname=replace(hisfile, ".nc" => ".zarr") + if haskey(globals,"zarr_file") + outname=globals["zarr_file"] + end + if ispath(outname) + error("Output name $(outname) exists. 
Will not overwrite.") + end + globalattrs = his.gatts + out = zgroup(outname,attrs=globalattrs) + for varname in vars + println("copying variable $(varname)") + copy_var(his,out,varname,config) + end + #copy dimensions and coordinates + copy_var(his,out,"time",config) + copy_var(his,out,"station_x_coordinate",config) + copy_var(his,out,"station_y_coordinate",config) + copy_strings(his,out,"station_name",config) + if has_z(his,vars) + copy_var(his,out,"zcoordinate_c",config) + copy_var(his,out,"zcoordinate_w",config) + end + Zarr.consolidate_metadata(out) + ## TODO + else # expect history file and generate default config + hisfile=first(args) + if !isfile(hisfile) + throw(ArgumentError("File not found: $(hisfile)\n")) + end + if !endswith(lowercase(first(args)),r"_his.nc") + throw(ArgumentError("Expecting a Deflt3D-FM history netcdf file: $(hisfile)\n")) + end + configfile="config_his.toml" + if isfile(configfile) + throw(ArgumentError("Existing config file. Will not overwrite: $(configfile)\n")) + end + config=default_config(hisfile) + configfile="config_his.toml" + open(configfile, "w") do io + TOML.print(io, config) + end + if(debuglevel>0) + TOML.print(config) + end + @info "Configuration has been written to $(configfile)" + end +end + + +# +# main +# +hisfile=["test_data/locxxz_his.nc"] +configfile=["config_his.toml"] +#main(ARGS) + +nothing diff --git a/case_gloparts/Particles.jl/src/dflow_map_interp_to_zarr.jl b/case_gloparts/Particles.jl/src/dflow_map_interp_to_zarr.jl new file mode 100644 index 0000000..9c0e1db --- /dev/null +++ b/case_gloparts/Particles.jl/src/dflow_map_interp_to_zarr.jl @@ -0,0 +1,698 @@ +# +# Convert delft3d time-series to zarr format +# This add selection, compression and chuning into separate files per chunk +# +using TOML +using NetCDF +using Zarr +using Dates +include(joinpath(@__DIR__,"unstructured_grid.jl")) #bit ad-hoc +include(joinpath(@__DIR__,"dflow.jl")) #bit ad-hoc + +debuglevel=1 + +# +# defaults +# +# these variables are 
added to the configuration by default +try_vars = ["waterlevel","x_velocity","y_velocity","salinity","temperature"] +# default settings per variable +defaults = Dict( + "waterlevel" => Dict( + "name" => "waterlevel", + "scale_factor" => 0.001, + "add_offset" => 0.0, + "data_type" => "Int16", + "_FillValue" => 9999 ), + "x_velocity" => Dict( + "name" => "x_velocity", + "scale_factor" => 0.001, + "add_offset" => 0.0, + "data_type" => "Int16", + "_FillValue" => 9999 ), + "y_velocity" => Dict( + "name" => "y_velocity", + "scale_factor" => 0.001, + "add_offset" => 0.0, + "data_type" => "Int16", + "_FillValue" => 9999 ), + "salinity" => Dict( + "name" => "salinity", + "scale_factor" => 0.01, + "add_offset" => 0.0, + "data_type" => "Int16", + "_FillValue" => 9999 ), + "temperature" => Dict( + "name" => "temperature", + "scale_factor" => 0.01, + "add_offset" => 0.0, + "data_type" => "Int16", + "_FillValue" => 9999 ), + "time" => Dict( + "scale_factor" => 1.0, + "add_offset" => 0.0, + "data_type" => "Float64", + "_FillValue" => -9999.0), + "x_center" => Dict( + "scale_factor" => 1.0, + "add_offset" => 0.0, + "data_type" => "Float64", + "_FillValue" => -9999.0), + "y_center" => Dict( + "scale_factor" => 1.0, + "add_offset" => 0.0, + "data_type" => "Float64", + "_FillValue" => -9999.0), + "z_center" => Dict( + "scale_factor" => 1.0, + "add_offset" => 0.0, + "data_type" => "Float64", + "_FillValue" => -9999.0), + "z_center_3d" => Dict( + "scale_factor" => 0.1, + "add_offset" => 0.0, + "data_type" => "Int16", + "_FillValue" => -9999.0) + ) + +# variables appear under different names in the delft3d-fm output files. Here we list the options +aliases=Dict{String,Vector{String}}( + "waterlevel" => ["s1", "mesh2d_s1"], + "x_velocity" => ["ucx", "mesh2d_ucx"], + "y_velocity" => ["ucy", "mesh2d_ucy"], + "salinity" => ["sa1","mesh2d_sa1"], #sa1 not sal (one)? 
+ "temperature" => ["tem1","mesh2d_tem1"], + "x_center" => ["FlowElem_xcc","mesh2d_face_x"], + "y_center" => ["FlowElem_ycc","mesh2d_face_y"], + "z_center" => ["mesh2d_layer_z","LayCoord_cc"], #1d + "z_center_3d" => ["mesh2d_flowelem_zcc"], #3d + "x_node" => ["mesh2d_node_x","NetNode_x"], + "y_node" => ["mesh2d_node_y","NetNode_y"], + "time" => ["time"] +) +#znames_iface=["mesh2d_layer_z"] + +chunk_target_size=1000000 +# +# supporting functions +# +typeconv = Dict{String,DataType}( #String to DataType conversion + "Int32" => Int32, + "Int16" => Int16, + "Int8" => Int8, + "Float32" => Float32, + "Float64" => Float64 +) + +""" +function get_varname(name::String,nc::NcFile) +Find name of a variable in an nc file using a list of alias values +Example: xname = get_name("x_node",keys(ncfile)) +returns nothing if variable does not exist on the file +""" +function get_varname(name::String,nc::NcFile) + ncvarnames=keys(nc) + if !(name in keys(aliases)) + error("unknown variable") + end + tempstrs=intersect(aliases[name],ncvarnames) + if length(tempstrs)==0 + return nothing + else + return first(tempstrs) + end +end + +""" +function get_coord_info(mapfiles::Vector{String}) +Example: info=get_coord_info(["test_data/locxxz_map.nc"]) + ymin=info["ymin"] +Extract some info from mapfiles. The purpose is to collect the +info that is needed for the default config. 
+""" +function get_coord_info(mapfiles::Vector{String}) + firstmap=NetCDF.open(first(mapfiles)) + # coordinate names + xname_node=get_varname("x_node",firstmap) + if xname_node===nothing + error("Could not find x-coordinate variable in $(firstmap)") + end + yname_node=get_varname("y_node",firstmap) + if yname_node===nothing + error("Could not find y-coordinate variable in $(firstmap)") + end + xname_center=get_varname("x_center",firstmap) + if xname_center===nothing + error("Could not find x-coordinate variable in $(firstmap)") + end + yname_center=get_varname("y_center",firstmap) + if yname_center===nothing + error("Could not find y-coordinate variable in $(firstmap)") + end + zname_center=get_varname("z_center",firstmap) + if zname_center===nothing + zname_center="" + end + # determine bbox + xs=firstmap[xname_node][:] + xmax=maximum(xs) + xmin=minimum(xs) + ys=firstmap[yname_node][:] + ymax=maximum(ys) + ymin=minimum(ys) + xs_center=firstmap[xname_center][:] + ncells=[length(xs_center)] + varnames=[] + for varname in try_vars + ncvar=get_varname(varname,firstmap) + if !(ncvar===nothing) + push!(varnames,varname) + end + end + finalize(firstmap) + # look at other map files + for i=2:length(mapfiles) + mapfile=NetCDF.open(mapfiles[i]) + xs=mapfile[xname_node][:] + xmax=max(xmax,maximum(xs)) + xmin=min(xmin,minimum(xs)) + ys=mapfile[yname_node][:] + ymax=max(ymax,maximum(ys)) + ymin=min(ymin,minimum(ys)) + xs_center=mapfile[xname_center][:] + push!(ncells,length(xs_center)) + finalize(mapfile) + end + return Dict{String,Any}("xmin" => xmin, "ymin" => ymin, "xmax" => xmax, "ymax" => ymax, + "ncells" => ncells, "xname_node" => xname_node, "yname_node" => yname_node, + "xname_center" => xname_center, "yname_center" => yname_center, + "zname_center" => zname_center, "varnames" => varnames) +end + +""" +function default_config(mapfiles::Vector{String}) + Example: conf=default_config("myrun_map_interp.nc") +""" +function default_config(mapfiles::Vector{String}) + 
#firstmap=first(mapfiles) + config=Dict{String,Any}() + globals=Dict{String,Any}() + globals["map_files"]=mapfiles + zarrname=replace(first(mapfiles), ".nc" => ".zarr") + zarrname=replace(zarrname, r"_[0-9]+" => s"") + zarrname=replace(zarrname, r".*/" => s"") + globals["zarr_file"]=zarrname + globals["chunk_target_size"]=chunk_target_size + info=get_coord_info(mapfiles) + globals["extent"]=Dict{String,Any}("xmin" => info["xmin"], "xmax" =>info["xmax"], + "ymin" => info["ymin"], "ymax" =>info["ymax"]) + globals["islatlon"]=false + #propose dx and dy for regular grid + lx=info["xmax"]-info["xmin"] + ly=info["ymax"]-info["ymin"] + n=sum(info["ncells"]) + area_per_cell=lx*ly/n + aspect_ratio=lx/ly + dx=sqrt(area_per_cell) #estimate dy same + nx=max(round(Int64,lx/dx)-1,1) + ny=max(round(Int64,ly/dx)-1,1) + globals["nx"]=nx + globals["ny"]=ny + config["global"]=globals + varnames=info["varnames"] + for varname in try_vars + varconfig=Dict{String,Any}( + "scale_factor" => defaults[varname]["scale_factor"], + "add_offset" => defaults[varname]["add_offset"], + "data_type" => defaults[varname]["data_type"], + ) + config[varname]=varconfig + end + return config +end + +""" +function default_map_chunksize(nx,ny,ntarget) + example: (ncx,ncy)=default_map_chunksize(nx,ny,ntarget) + with 40 x 10 cells and a target size of 100, this will give 10x10 chunks + """ +function default_map_chunksize(nx,ny,ntarget) + nx_chunks=max(1,round(Int64,sqrt(ntarget))) + ny_chunks=max(1,round(Int64,sqrt(ntarget))) + if nx_chunks>nx + nx_chunks=nx + ny_chunks=max(1,round(Int64,(ntarget/nx))) + ny_chunks=min(ny_chunks) + end + if ny_chunks>ny + ny_chunks=ny + nx_chunks=max(1,round(Int64,(ntarget/ny))) + nx_chunks=min(nx_chunks,nx) + end + return (nx_chunks,ny_chunks) +end + +""" +function default_his_chunksize(Vector::varsize,ntarget=1000000) + cz = default_his_chunksize((1000,1000),10000) + Strategy is to make chunks over stations +""" +function default_his_chunksize(varsize,ntarget=1000000) + 
blocksize=[varsize...] #make mutable + rank=length(varsize) + n=prod(varsize) + blockpref=1 + if rank==3 + blockpref=2 + end + if n>ntarget + nblocks=div(n,ntarget) + nblocks=max(1,nblocks) + blocklen=div(varsize[blockpref],nblocks) + blocklen=max(blocklen,1) + blocksize[blockpref]=blocklen + end + return tuple(blocksize...) + end + +""" +function varlist(config::Dict{String,Any}) + vars=varlist(config) +""" +function varlist(config::Dict{String,Any}) + vars=Vector{String}() + for varname in keys(config) + if !startswith(lowercase(varname),r"global") + push!(vars,varname) + end + end + return vars +end + +""" +function vardims(var::NcVar) + example: mydims = vardims(nc.vars["waterlevel"]) +""" +function vardims(var::NcVar) + ndims=var.ndim + return reverse([var.dim[i].name for i in 1:ndims]) +end + +""" +function copy_var(input::NcFile,output,varname,config,stoponmissing=true) +""" +function copy_var(input::NcFile,output,varname,config,stop_on_missing=true) + println("copy variable name=$(varname)") + ncname=get_varname(varname,input) + if (ncname===nothing) + if stop_on_missing + error("could not find variable $(varname) in $(input.name).") + else + return nothing + end + end + in_var=input.vars[ncname] + in_atts=in_var.atts + in_type=typeof(in_var).parameters[1] + in_dummy=get(in_atts,"_FillValue",9999.0) + in_size=size(in_var) + in_rank=length(in_size) + #output + if !haskey(config,varname) #create empty one if absent + config[varname]=Dict{String,Any}() + end + out_offset=get(config[varname],"add_offset",defaults[varname]["add_offset"]) + out_scale=get(config[varname],"scale_factor",defaults[varname]["scale_factor"]) + out_dummy=get(config[varname],"_FillValue",defaults[varname]["_FillValue"]) + out_type_str=get(config[varname],"data_type",defaults[varname]["data_type"]) + out_type=typeconv[out_type_str] + out_chunk_target_size=get(config["global"],"chunk_target_size",chunk_target_size) + out_chunk_size=default_his_chunksize(in_size,out_chunk_target_size) + 
out_atts=copy(in_atts) + out_atts["scale_factor"]=out_scale + out_atts["add_offset"]=out_offset + out_atts["_ARRAY_DIMENSIONS"]=vardims(in_var) + ###DEBUG MVL + println("varname= $(varname)") + println("in_dummy = $(in_dummy)") + println("out_dummy= $(out_dummy)") + #create output ar + out_var = zcreate(out_type, output, varname,in_size...,attrs=out_atts, chunks = out_chunk_size) + println("in_size= $(in_size)") + println("out_size= $(size(out_var))") + #copy content + if in_rank==1 + #in one go + in_temp=in_var[:] + if out_type<:Int + out_temp=round.(out_type,(in_temp.-out_offset)./out_scale) + out_temp[in_temp.==in_dummy].=out_dummy + out_var[:].=out_temp[:] + else + out_temp=(in_temp.-out_offset)./out_scale + out_temp[in_temp.==in_dummy].=out_dummy + out_var[:].=out_temp[:] + end + elseif in_rank==2 + println("out_chunk_size = $(out_chunk_size)") + if prod(in_size)==prod(out_chunk_size) + #in one go + in_temp=in_var[:,:] + out_temp=round.(out_type,(in_temp.-out_offset)./out_scale) + out_temp[in_temp.==in_dummy].=out_dummy + out_var[:,:].=out_temp[:,:] + else #multiple blocks in time + nblocks=max(div(prod(in_size),prod(out_chunk_size)),1) + dimlen=in_size[2] + blockstep=max(1,div(dimlen,nblocks)) + ifirst=1 + while ifirst "") + println("out_temp= $(out_temp)") + out_var[:].=out_temp[:] +end + +""" +function has_z(input::NcFile,vars::Vector) +Report if any of the variables has a z coordinate. +We now take a shortcut and check if the variable is a 3d array. 
+""" +function has_z(input::NcFile,vars::Vector) + result=false + for varname in vars + r=length(size(input.vars[varname])) + if r==3 + result=true + end + end + return result +end + +# +# main function +# +function main(args) + # read user input + n = length(args) + if n < 1 + println("Usage: julia dflow_map_interp_to_zarr.jl config.toml") + println("This command will generate a config file:") + println("julia dflow_map_interp_to_zarr.jl myrun_map_00*.nc") + elseif endswith(lowercase(first(args)),r".toml") + configfile=first(args) + if !isfile(configfile) + throw(ArgumentError("File not found: $(configfile)\n")) + end + # load configfile + config=TOML.parsefile(configfile) + if !haskey(config,"global") + error("Missing [global] group in config file") + end + globals=config["global"] + if !haskey(globals,"map_files") + error("Missing keyword map_files in group [global] in config file") + end + map_files=globals["map_files"] + if !isfile(first(map_files)) + error("Cannot find file: $(first(map_files))") + end + outname="" + if haskey(globals,"zarr_file") + outname=globals["zarr_file"] + else + error("missing key zarr_file in configuration)") + end + if ispath(outname) + error("Output name $(outname) exists. 
Will not overwrite.") + end + # initialize netcdf maps files + map=[NetCDF.open(filename) for filename in map_files] + firstmap=first(map) + # Create zarr file and start copying metadata + globalattrs = firstmap.gatts + output = zgroup(outname,attrs=globalattrs) + # output coordinates + islatlon=globals["islatlon"] + nx=max(1,globals["nx"]-1) + ny=max(1,globals["ny"]-1) + extent=globals["extent"] + xmin=extent["xmin"] + xmax=extent["xmax"] + ymin=extent["ymin"] + ymax=extent["ymax"] + dx=(xmax-xmin)/nx + dy=(ymax-ymin)/ny + xmin=xmin+0.5*dx + xmax=xmax-0.5*dx + ymin=ymin+0.5*dy + ymax=ymax-0.5*dy + xpoints=collect(range(xmin,stop=xmax,length=nx)) + ypoints=collect(range(ymin,stop=ymax,length=ny)) + println("x: $(xmin) : $(dx) $(xmax) : $(nx)") + println("y: $(ymin) : $(dy) $(ymax) : $(ny)") + # write coordinates + xname_node=get_varname("x_node",firstmap) + xatts=firstmap[xname_node].atts + xatts["_ARRAY_DIMENSIONS"]=["x"] + xvar = zcreate(Float64, output, "x_center",length(xpoints),attrs=xatts) + xvar[:]=xpoints[:] + yname_node=get_varname("y_node",firstmap) + yatts=firstmap[yname_node].atts + yatts["_ARRAY_DIMENSIONS"]=["y"] + yvar = zcreate(Float64, output, "y_center",length(ypoints),attrs=yatts) + yvar[:]=ypoints[:] + # set up interpolation + interp=load_dflow_grid(map,50,islatlon) + # interpolate per variable + vars=varlist(config) + for varname in vars + println("Interpolating for variable $(varname)") + interp_var(map,interp,output,varname,xpoints,ypoints,config) + end + #copy dimensions and coordinates + nc_z=get_varname("z_center_3d",firstmap) #try 3d z coords + if !(nc_z==nothing) + interp_var(map,interp,output,"z_center_3d",xpoints,ypoints,config) + end + copy_var(firstmap,output,"z_center",config,false) #try 1d z coords + copy_var(firstmap,output,"time",config) + # create consolidate_metadata for faster internet access + Zarr.consolidate_metadata(output) + + else # expect list of mapfiles and generate default config + first_mapfile=first(args) + if 
!isfile(first_mapfile) + throw(ArgumentError("File not found: $(first_mapfile)\n")) + end + if !endswith(lowercase(first(args)),r"_map.nc") + throw(ArgumentError("Expecting a Deflt3D-FM map netcdf file: $(first_mapfile)\n")) + end + configfile="config_maps_interp.toml" + if isfile(configfile) + throw(ArgumentError("Existing config file. Will not overwrite: $(configfile)\n")) + end + config=default_config(args) + configfile="config_maps_interp.toml" + open(configfile, "w") do io + TOML.print(io, config) + end + if(debuglevel>0) + TOML.print(config) + end + @info "Configuration has been written to $(configfile)" + end +end + + +# +# main +# +mapfiles=["test_data/estuary_0000_map.nc", "test_data/estuary_0000_map.nc"] +#mapfiles=["test_data/locxxz_map.nc"] +configfile=["config_map_interp.toml"] +main(ARGS) + +nothing diff --git a/case_gloparts/Particles.jl/src/era5.jl b/case_gloparts/Particles.jl/src/era5.jl new file mode 100644 index 0000000..e4078fe --- /dev/null +++ b/case_gloparts/Particles.jl/src/era5.jl @@ -0,0 +1,155 @@ +# Interact with ERA5 netcdf output +# basically these are lon-lat regular NetCDF files. +# These routines are tested with data from the copernicus +# climate data store +# For more see: https://cds.climate.copernicus.eu +# +# function EraData(path,filename) +# function load_map_slice(data::EraData,varname,itime) +# function get_reftime(data::EraData) +# function get_times(data::EraData,reftime::DateTime) +# function as_DateTime(data::EraData,reftime::DateTime,relative_time) +# function initialize_interpolation(data::EraData,varname::String,reftime::DateTime,dummy=0.0) + + +using NetCDF +using Dates + +debuglevel=1 #0-nothing, larger more output + +""" +Using the EraData struct the file-handling becomes object-oriented. 
+""" +mutable struct EraData + file::NcFile + #derived data + grid::CartesianGrid + """ + Constructor + era_data = EraData(".","my_era_file.nc") + """ + function EraData(path,filename) + file=NetCDF.open(joinpath(path,filename)) + x=collect(file.vars["longitude"]) + y=collect(file.vars["latitude"]) + grid=CartesianGrid(x,y,true) + return new(file,grid) + end +end + +""" + wind_x = load_map_slice(era_data,"u10",1) +Load data for a time-dependent variable for a specific time. +""" +function load_map_slice(data::EraData,varname,itime) + ndims=length(size(data.file[varname]))-1 #only spatial dims + scale_factor=data.file[varname].atts["scale_factor"] + offset=data.file[varname].atts["add_offset"] + dummy=data.file[varname].atts["missing_value"] + if ndims==2 + tempvar=data.file[varname][:,:,itime] + if length(size(tempvar))>2 # Starting somewhere around julia version 1.3 dimensions are dropped automatically when slicing + # this check can be removed some time in the future, when we assumen versions less than 1.4 are no longer in use + tempvar=dropdims(tempvar,dims=3) + end + var=offset.+scale_factor.*tempvar + var[tempvar.==dummy].=NaN + return var + else + error("only 2D (one layer) variables supported for now.") + end +end + +""" + t_ref = get_reftime(era_data) +Read the reference time from the attributes of time in the netcdf file. +Times are described by a reference time (DateTime type) and the number of hours +relative to this t_ref. 
+""" +function get_reftime(data::EraData) + time_relative=data.file.vars["time"] + units=time_relative.atts["units"] + temp=split(units,"since") + temp2=split(temp[2],".") + println("ref date $(temp2[1])") + t0=DateTime(strip(temp2[1]),"yyyy-mm-dd HH:MM:SS") + dt_seconds=1.0 + if startswith(temp[1],"seconds") + dt_seconds=1.0 + elseif startswith(temp[1],"minutes") + dt_seconds=60.0 + elseif startswith(temp[1],"hours") + dt_seconds=3600.0 + elseif startswith(temp[1],"days") + dt_seconds=24.0*3600.0 + else + error("Invalid time-step unit in map-file.") + end + seconds_per_day=24.0*3600.0 + if debuglevel>2 + println("$(dt_seconds), $(t0), $(seconds_per_day), $(time_relative[1])") + end + relative_days_in_seconds=div(dt_seconds*time_relative[1],seconds_per_day)*seconds_per_day + return t0 + Dates.Second(relative_days_in_seconds) +end + +""" + times=get_times(era_data,Dates.DateTime(2019,1,1)) + +Get available times im netcdf map files as a range in seconds, eg 0.0:3600.0:7200.0 +The reftime (second arg) is a Dates.DateTime can be used to change the reftime to something +convenient. With reftime=get_reftime(map) you can find the time of the first map, which is often +a nice default. 
+""" +function get_times(data::EraData,reftime::DateTime) + time_relative=data.file.vars["time"] + units=time_relative.atts["units"] + temp=split(units,"since") + temp2=split(temp[2],".") + println("ref date $(temp2[1])") + t0=DateTime(strip(temp2[1]),"yyyy-mm-dd HH:MM:SS") + dt_seconds=1.0 + if startswith(temp[1],"seconds") + dt_seconds=1.0 + elseif startswith(temp[1],"minutes") + dt_seconds=60.0 + elseif startswith(temp[1],"hours") + dt_seconds=3600.0 + elseif startswith(temp[1],"days") + dt_seconds=24.0*3600.0 + else + error("Invalid time-step unit in map-file.") + end + times=[] + for ti=1:length(time_relative) + push!(times,(0.001*Dates.value(t0-reftime))+time_relative[ti]*dt_seconds) + end + return times +end + +""" + t = as_DateTime(data,reftime,relative_time) +Convert a reftime (DateTime) and relative_time (Float64 with seconds) relative to this. +Returns a DateTime type. +""" +function as_DateTime(data::EraData,reftime::DateTime,relative_time) + reftime+Float64(relative_time)*Dates.Second(1) +end + +""" +p = initialize_interpolation(era,"msl",t0) +Create an interpolation function p(x,y,z,t) +""" +function initialize_interpolation(data::EraData,varname::String,reftime::DateTime,dummy=0.0,cache_direction::Symbol=:forwards) + times=get_times(data,reftime) + values=data.file.vars[varname] #TODO more checks + missing_value=values.atts["missing_value"] + scaling=values.atts["scale_factor"] + offset=values.atts["add_offset"] + xyt=CartesianXYTGrid(data.grid,times,values,varname,missing_value,scaling,offset,cache_direction) + function f(x,y,z,t) + value=interpolate(xyt,x,y,t,dummy) + return value + end + return f +end diff --git a/case_gloparts/Particles.jl/src/grids.jl b/case_gloparts/Particles.jl/src/grids.jl new file mode 100644 index 0000000..fb59275 --- /dev/null +++ b/case_gloparts/Particles.jl/src/grids.jl @@ -0,0 +1,10 @@ +# +# Generic grids +# + +# Base type for all grids +abstract type BaseGrid end +abstract type SpaceGrid <: BaseGrid end +abstract type 
SpaceTimeGrid <: BaseGrid end


diff --git a/case_gloparts/Particles.jl/src/matroos_grid.jl b/case_gloparts/Particles.jl/src/matroos_grid.jl
new file mode 100644
index 0000000..0bb3188
--- /dev/null
+++ b/case_gloparts/Particles.jl/src/matroos_grid.jl
@@ -0,0 +1,163 @@
# Interact with gridded netcdf output from the Deltares/RWS Matroos database
# The current implementation only considers lon-lat regular NetCDF files.
# These routines are tested with data from matroos.deltares.nl / matroos.rws.nl
#
# For more see: https://matroos.rws.nl
# or https://matroos.deltares.nl
#
# function MatroosData(path,filename)
# function load_map_slice(data::MatroosData,varname,itime)
# function get_reftime(data::MatroosData)
# function get_times(data::MatroosData,reftime::DateTime)
# function as_DateTime(data::MatroosData,reftime::DateTime,relative_time)
# function initialize_interpolation(data::MatroosData,varname::String,reftime::DateTime,dummy=0.0)


using NetCDF
using Dates

debuglevel=1 #0-nothing, larger more output

"""
Using the MatroosData struct the file-handling becomes object-oriented.
"""
mutable struct MatroosData
    file::NcFile
    #derived data
    grid::CartesianGrid
    """
    Constructor
    matroos_data = MatroosData(".","my_matroos_file.nc")
    """
    function MatroosData(path, filename)
        file = NetCDF.open(joinpath(path, filename))
        # Require the regular coordinate variables before building the grid.
        haskey(file.vars, "x") || error("No variable x in NetCDF files.")
        haskey(file.vars, "y") || error("No variable y in NetCDF files.")
        xcoords = collect(file.vars["x"])
        ycoords = collect(file.vars["y"])
        return new(file, CartesianGrid(xcoords, ycoords, true))
    end
end

"""
    wind_x = load_map_slice(matroos_data,"u10",1)
Load data for a time-dependent variable for a specific time.
"""
function load_map_slice(data::MatroosData, varname, itime)
    ncvar = data.file[varname]
    nspatial = length(size(ncvar)) - 1 # spatial dims only; last dim is time
    # Packed-variable attributes used to unpack the raw slice.
    sf = ncvar.atts["scale_factor"]
    off = ncvar.atts["add_offset"]
    fillval = ncvar.atts["_FillValue"]
    if nspatial != 2
        error("only 2D (one layer) variables supported for now.")
    end
    raw = ncvar[:, :, itime]
    # Starting somewhere around julia version 1.3 singleton dimensions are
    # dropped automatically when slicing; drop the time axis if it survived.
    if length(size(raw)) == 3
        raw = dropdims(raw, dims = 3)
    end
    slice = off .+ sf .* raw
    slice[raw .== fillval] .= NaN # fill values become NaN after unpacking
    return slice
end

"""
    t_ref = get_reftime(matroos_data)
Read the reference time from the attributes of time in the netcdf file.
Times are described by a reference time (DateTime type) and the number of hours
relative to this t_ref.
"""
function get_reftime(data::MatroosData)
    time_relative = data.file.vars["time"]
    units = time_relative.atts["units"]
    temp = split(units, "since")
    temp2 = split(temp[2], ".")
    println("ref date $(temp2[1])")
    t0 = DateTime(strip(temp2[1]), "yyyy-mm-dd HH:MM:SS")
    # Map the unit prefix to a step in seconds; 0.0 marks "not matched".
    dt_seconds = 0.0
    for (prefix, secs) in (("seconds", 1.0), ("minutes", 60.0), ("hours", 3600.0), ("days", 24.0 * 3600.0))
        if startswith(temp[1], prefix)
            dt_seconds = secs
            break
        end
    end
    dt_seconds > 0.0 || error("Invalid time-step unit in map-file.")
    seconds_per_day = 24.0 * 3600.0
    if debuglevel > 2
        println("$(dt_seconds), $(t0), $(seconds_per_day), $(time_relative[1])")
    end
    # Round the first time down to a whole number of days relative to t0.
    relative_days_in_seconds = div(dt_seconds * time_relative[1], seconds_per_day) * seconds_per_day
    return t0 + Dates.Second(relative_days_in_seconds)
end

"""
    times=get_times(matroos_data,Dates.DateTime(2019,1,1))

Get available times in netcdf map files as a range in seconds, eg 0.0:3600.0:7200.0
The reftime (second arg) is a Dates.DateTime can be used to change the reftime to something
convenient.
With reftime=get_reftime(map) you can find the time of the first map, which is often
a nice default.
"""
function get_times(data::MatroosData,reftime::DateTime)
    time_relative=data.file.vars["time"]
    units=time_relative.atts["units"]
    temp=split(units,"since")
    temp2=split(temp[2],".") # strip fractional-seconds residue before parsing
    println("ref date $(temp2[1])")
    t0=DateTime(strip(temp2[1]),"yyyy-mm-dd HH:MM:SS")
    dt_seconds=1.0
    if startswith(temp[1],"seconds")
        dt_seconds=1.0
    elseif startswith(temp[1],"minutes")
        dt_seconds=60.0
    elseif startswith(temp[1],"hours")
        dt_seconds=3600.0
    elseif startswith(temp[1],"days")
        dt_seconds=24.0*3600.0
    else
        error("Invalid time-step unit in map-file.")
    end
    # Type-stable Vector{Float64} (was an untyped Any[] accumulator);
    # consistent with the era5.jl variant of this routine.
    times=Float64[(0.001*Dates.value(t0-reftime))+time_relative[ti]*dt_seconds
                  for ti=1:length(time_relative)]
    return times
end

"""
    t = as_DateTime(data,reftime,relative_time)
Convert a reftime (DateTime) and relative_time (Float64 with seconds) relative to this.
Returns a DateTime type.
"""
function as_DateTime(data::MatroosData,reftime::DateTime,relative_time)
    # Millisecond resolution avoids an InexactError for fractional seconds.
    reftime+Dates.Millisecond(round(Int64,1000.0*Float64(relative_time)))
end

"""
p = initialize_interpolation(matroos,"msl",t0)
Create an interpolation function p(x,y,z,t)
"""
function initialize_interpolation(data::MatroosData,varname::String,reftime::DateTime,dummy=0.0,cache_direction::Symbol=:forwards)
    times=get_times(data,reftime)
    values=data.file.vars[varname] #TODO more checks
    missing_value=values.atts["_FillValue"]
    scaling=values.atts["scale_factor"]
    offset=values.atts["add_offset"]
    xyt=CartesianXYTGrid(data.grid,times,values,varname,missing_value,scaling,offset,cache_direction)
    # Closure over the space-time grid; z is accepted but not used here.
    function f(x,y,z,t)
        return interpolate(xyt,x,y,t,dummy)
    end
    return f
end
diff --git a/case_gloparts/Particles.jl/src/particles_core.jl b/case_gloparts/Particles.jl/src/particles_core.jl
new file mode 100644
index 0000000..158af67
--- /dev/null
+++ b/case_gloparts/Particles.jl/src/particles_core.jl
@@ -0,0
+1,384 @@
# Shared routines for particle modelling.
# No routines for specific data sources or particle model-equations should be included,
# just the generic stuff goes here.

using Plots
using StaticArrays
using Dates
using Printf
using LabelledArrays

debuglevel = 1

"""
    d = default_userdata()
    initialize Dict with some default configuration data
"""
function default_userdata()
    defaults = Dict{String,Any}() # same key/value types Dict(...) would infer
    # general
    defaults["dt"] = 0.01                      # TODO a fixed timestep will not work in general
    defaults["tstart"] = 0.0
    defaults["tend"] = 1.0
    defaults["reftime"] = DateTime(2000, 1, 1) # Jan 1st 2000
    defaults["coordinates"] = "projected"      # projected or spherical
    defaults["nparticles"] = 10                # number of particles
    defaults["variables"] = ("x", "y")         # recognized are x,y,lat,lon; other variables are written with partial meta-data
    defaults["dumval"] = 9999.0
    defaults["time_direction"] = :forwards     # :forwards or :backwards
    # plotting to screen
    defaults["plot_maps"] = false
    defaults["plot_maps_size"] = (1200, 1000)
    defaults["plot_maps_times"] = []
    defaults["plot_maps_func"] = plot_maps_xy
    defaults["plot_maps_folder"] = "output"
    defaults["plot_maps_prefix"] = "map"
    # results that are kept in memory
    defaults["keep_particles"] = false
    defaults["keep_particle_times"] = []
    defaults["all_particles"] = []
    # results written to netcdf file
    defaults["write_maps"] = false
    defaults["write_maps_times"] = []
    defaults["write_maps_dir"] = "."
    defaults["write_maps_as_series"] = true
    defaults["write_maps_filename"] = "output.nc"
    return defaults
end

# Allow string keys on NamedTuples so configuration tuples can be indexed
# just like the Dict returned by default_userdata().
Base.getindex(nt::NamedTuple, s::AbstractString) = getindex(nt, Symbol(s))
# Base.setindex!(nt::NamedTuple, v, k::AbstractString) = setindex!(nt, v, Symbol(k))

"""
    run_simulation(d)
    Main simulation routine. The configuration is contained in Dict d,
    which may contain call-backs for plotting etc.
"""
function run_simulation(d)
    vars = d["variables"]
    npart = d["nparticles"]
    nvars = length(vars)
    p = d["particles"]
    p_all = d["all_particles"] #if requested keep particles at intermediate times
    Plots.default(:size, d["plot_maps_size"])

    # show inputs
    if debuglevel > 2
        println("configuration")
        display(d)
    end

    # simulation timespan: forwards starts at tstart, backwards at tend
    tstart = d["tstart"]
    tend = d["tend"]
    tref = d["reftime"]
    t = tstart
    if d["time_direction"] == :forwards
        t = tstart
    elseif d["time_direction"] == :backwards
        t = tend
    else
        throw(ArgumentError("Unsupported variable d[time_direction]"))
    end

    # initialize outputs; target_times collects every time at which the
    # time-stepping must be interrupted for some output action
    target_times = Float64[]
    if d["plot_maps"]
        # init timing
        plot_maps_times = d["plot_maps_times"]
        target_times = sort(union(plot_maps_times, target_times))
        print("plotting output at t=")
        print_times(tref, plot_maps_times)
        # init plots
        Plots.default(:size, d["plot_maps_size"])
        fig1 = d["plot_maps_background"](d)
        # scatter!(fig1,p[1,:],p[2,:],legend=false)
        d["plot_maps_func"](fig1, d, p)
        # TODO possibly label=[string(t)])
        # gui(fig1)
        # check outputfolder
        plot_maps_folder = d["plot_maps_folder"]
        length(plot_maps_folder) > 0 || error("empty plot_maps_folder")
        if isdir(plot_maps_folder)
            println("Removing existing output in folder $(plot_maps_folder)")
            rm(plot_maps_folder, recursive = true)
        end
        mkdir(plot_maps_folder)
    end
    if d["write_maps"]
        write_maps_times = d["write_maps_times"]
        target_times = sort(union(write_maps_times, target_times))
        print("writing output to netcdf at t = ")
        print_times(tref, write_maps_times)
        (nc_out, ncvars) = initialize_netcdf_output(d)
    end
    if d["keep_particles"] # in-memory storage
        keep_particle_times = d["keep_particle_times"]
        target_times = sort(union(keep_particle_times, target_times))
        print("writing output to memory at t = ")
        print_times(tref, keep_particle_times)
    end

    # if the end time of the simulation is after the last output request
    # then still simulate until end times. TODO This is debatable.
    if ((length(target_times) == 0) || (target_times[end] < tend))
        push!(target_times, tend)
    end
    # remove output requests outside the simulation time-span
    if target_times[end] > tend
        temp = sort(union(target_times, tend))
        i_last = findlast(x -> x <= tend, temp)
        target_times = temp[1:i_last]
    end
    print("interrupt simulation for output at t = ")
    print_times(tref, target_times)
    # BUGFIX: message used to repeat "since $(tref)" twice
    println("Simulation from time $(t) s to $(tend) s since $(tref)")
    if d["time_direction"] == :forwards
        # nothing
    elseif d["time_direction"] == :backwards
        target_times = sort(target_times, rev = true)
    end
    # simulate in pieces until next output-action
    for t_stop in target_times
        t_abs = tref + Second(round(t))
        t_stop_abs = tref + Second(round(t_stop))
        if d["time_direction"] == :forwards
            println("t=$(t) -> $(t_stop) : $(t_abs) -> $(t_stop_abs) : $(round(100.0 * (t_stop - tstart) / (tend - tstart), digits = 1))%")
        elseif d["time_direction"] == :backwards
            println("t=$(t) -> $(t_stop) : $(t_abs) -> $(t_stop_abs) : $(round(100.0 * (tend - t_stop) / (tend - tstart), digits = 1))%")
        end
        t = simulate!(p, t, t_stop, d)
        if (d["plot_maps"]) && (t_stop in plot_maps_times)
            (debuglevel > 1) && println("plotting map output")
            Plots.default(:size, d["plot_maps_size"])
            fig1 = d["plot_maps_background"](d)
            d["plot_maps_func"](fig1, d, p)
            # sleep(1)
            title!(fig1, "time $(t_stop_abs) : t=$(t_stop)")
            # gui(fig1) #force display to screen
            prefix = d["plot_maps_prefix"]
            savefig(fig1, joinpath(d["plot_maps_folder"], @sprintf("%s_%010.2f.png", prefix, t)))
        end
        if (d["write_maps"]) && (t_stop in write_maps_times)
            write_maps_as_series = d["write_maps_as_series"]
            timei = findfirst(x -> x == t_stop, write_maps_times)
            for vari = 1:nvars
                varname = vars[vari]
                if write_maps_as_series
                    start = [timei, 1] # time x part (series layout)
                    count = [1, npart]
                    NetCDF.putvar(ncvars[vari], collect(p[vari, :]'); start = start, count = count)
                else
                    start = [1, timei] # part x time
                    count = [npart, 1]
                    NetCDF.putvar(ncvars[vari], p[vari, :]; start = start, count = count)
                end
            end
        end
        if (d["keep_particles"]) && (t_stop in keep_particle_times)
            push!(p_all, copy(p))
        end
    end

    if d["write_maps"]
        # NetCDF.close(nc_out) #close was abandoned by NetCDF
        finalize(nc_out)
    end

    # wait for user
    # if !isinteractive() #wait for user to kill final plot
    #    println("Type [enter] to finish script")
    #    readline()
    # end
end

"""
    t_next=simulate!(p,t_now,t_stop,d)

Compute multiple timesteps until t>=t_stop for all particles in matrix p.
Possibly using parameters (eg dt) from d of type userdata.
"""
function simulate!(p, t, t_stop, d)
    Δt = d["dt"]
    f! = d["f"]
    variables = d["variables"]
    (m, n) = size(p) # no variables x no particles
    # NOTE(review): labels are fixed to (:x,:y,:z,:t) while the length follows
    # d["variables"] — these must agree; confirm for non-default variables.
    ∂s = @LArray zeros(length(variables)) (:x, :y, :z, :t)
    # s = @MVector zeros(length(variables))
    if d["time_direction"] == :forwards
        # the 0.25*Δt margin avoids an extra step due to rounding
        while (t < (t_stop - 0.25 * Δt))
            # (debuglevel >= 2) && println("... t=$(t) < $(t_stop)")
            for i = 1:n
                s = @view p[:, i]
                forward!(f!, Δt, ∂s, s, t, i, d)
            end
            t += Δt
        end
    elseif d["time_direction"] == :backwards
        while (t > (t_stop + 0.25 * Δt))
            # (debuglevel >= 2) && println("... t=$(t) > $(t_stop)")
            for i = 1:n
                s = @view p[:, i]
                f!(∂s, s, t, i, d)

                # I am still not quite sure of we should use += or -=
                # I think += is the way to go, and handle the velocity difference in the f(ds,s,t,i,d) function
                s .+= ∂s .* Δt # Euler forward
                # println(" $(i) $(s)")
                # p[:,i] = s[:]
            end
            t -= Δt
        end
    end
    return t
end

# One explicit Euler step for particle i: s += f(s,t)*Δt (updates s in place).
function forward!(f!, Δt, ∂s, s, t, i, d)
    f!(∂s, s, t, i, d)
    s .+= ∂s .* Δt
end



"""
print_times(reftime,times)

Print an array of relative times in compact readable format
"""
function print_times(reftime, times)
    println(IOContext(stdout, :limit => true), times)
end

"""
    i1 = index(2,[1,2,3,4])
    i2 = index("bob",["alex","bob","charlie"])
    Find first occurrence of a variable in an array.
    Returns nothing if the value is not found.
"""
function index(var, vars)
    return indexin([var], vars)[1]
end

"""
    f=plot_maps_xy(fig,d,p)
    Plot particles as dots in xy-plane.
"""
function plot_maps_xy(fig, d, p)
    # use either projected (x,y) or spherical (lon,lat) columns, whichever exists
    if "x" in d["variables"]
        x_index = index("x", d["variables"])
        y_index = index("y", d["variables"])
    elseif "lon" in d["variables"]
        x_index = index("lon", d["variables"])
        y_index = index("lat", d["variables"])
    else
        error("plot_maps_xy: no spatial variables x,y or lat,lon found")
    end
    scatter!(fig, p[x_index, :], p[y_index, :], markercolor = :black, legend = false)
end

"""
    f=plot_maps_xz(fig,d,p)
    Plot particles as dots in xz-plane.
+""" +function plot_maps_xz(fig, d, p) + x_index = index("x", d["variables"]) + z_index = index("z", d["variables"]) + scatter!(fig, p[x_index, :], p[z_index, :], legend = false) +end + +function initialize_netcdf_output(d) + # write as series (loc,time) or maps (times,locs) + write_maps_as_series = true # default + if haskey(d, "write_maps_as_series") + write_maps_as_series = d["write_maps_as_series"] + end + # file + filedir = d["write_maps_dir"] + filename = d["write_maps_filename"] + fullfile = joinpath(filedir, filename) + if !isdir(filedir) + println("Directory for output does not exist $(filedir). Creating it.") + mkpath(filedir) + end + if isfile(fullfile) + println("Output file exists. Removing file $(fullfile)") + rm(fullfile) + end + # time + t0 = d["reftime"] + # time_atts = Dict("units" => "seconds since $(Dates.format(t0,"yyyy-mm-dd HH:MM:SS"))", + # "standard_name" => "time", "long_name" => "time", + # "comment" => "unspecified time zone", "calendar" => "gregorian" ) + time_atts = Dict("units" => "seconds since $(Dates.format(t0, "yyyy-mm-dd HH:MM:SS"))", + "standard_name" => "time", "long_name" => "Time", + "comment" => "unspecified time zone", "calendar" => "gregorian") + map_times = collect(d["write_maps_times"]) + time_dim = NcDim("time", map_times, time_atts) + # particle + npart = d["nparticles"] + vars = d["variables"] + nvars = length(vars) + part_atts = Dict("long_name" => "particle id", "units" => "1", "cf_role" => "trajectory_id", "missing_value" => 9999) + part_dim = NcDim("particles", collect(1:1:npart), part_atts) + + # global attributes + gatts = Dict("title" => "Output of particle model", "Conventions" => "CF-1.6", "featureType" => "trajectory") + + myvars = [] + dumval = d["dumval"] + for vari = 1:nvars + varname = vars[vari] + varatts = Dict("long_name" => varname, "missing_value" => Float64(dumval)) + if varname == "x" + varatts["long_name"] = "x-coordinate" + varatts["standard_name"] = "projection_x_coordinate" + varatts["units"] 
= "m" + elseif varname == "y" + varatts["long_name"] = "y-coordinate" + varatts["standard_name"] = "projection_y_coordinate" + varatts["units"] = "m" + elseif varname == "z" + varatts["long_name"] = "z-coordinate" + varatts["units"] = "m" + elseif varname == "lon" + varatts["long_name"] = "Longitude" + varatts["standard_name"] = "longitude" + varatts["units"] = "degrees_east" + elseif varname == "lat" + varatts["long_name"] = "Latitude" + varatts["standard_name"] = "latitude" + varatts["units"] = "degrees_north" + elseif varname == "age" + varatts["long_name"] = "Age of particles" + varatts["units"] = "s" + varatts["coordinates"] = "time lat lon" + else + varatts["coordinates"] = "time lat lon" + end + if write_maps_as_series + myvar = NcVar(varname, [time_dim, part_dim], atts = varatts, t = Float64) + else + myvar = NcVar(varname, [part_dim, time_dim], atts = varatts, t = Float64) + end + + push!(myvars, myvar) + end + + nc = NetCDF.create(fullfile, NcVar[myvars...], gatts = gatts, mode = NC_NETCDF4) + + p = d["particles"] # var x part + for vari = 1:nvars + start = [1, 1] # part x time + if write_maps_as_series + count = [1, npart] + NetCDF.putvar(myvars[vari], collect(p[vari, :]'); start = start, count = count) + else + count = [npart, 1] + NetCDF.putvar(myvars[vari], p[vari, :]; start = start, count = count) + end + end + + return (nc, myvars) + # NetCDF.close(nc) +end diff --git a/case_gloparts/Particles.jl/src/unstructured_grid.jl b/case_gloparts/Particles.jl/src/unstructured_grid.jl new file mode 100644 index 0000000..e365219 --- /dev/null +++ b/case_gloparts/Particles.jl/src/unstructured_grid.jl @@ -0,0 +1,566 @@ +# some tools for unstructured grids +using Test +using NetCDF + +debuglevel=0 + +if !@isdefined(Grid) +mutable struct Grid + #global list (by ref) even for subgrids + xnodes::Array{Float64,1} #x-coordinates of nodes + ynodes::Array{Float64,1} #y-coordinates of nodes + edges::Array{Int64,2} #list of node numbers around a cell. 
Row nr of entry is cell nr + #negative numbers for padding, eg [1,2,3,-1] for triangle 1,2,3 + #derived data for optimization + edge_indices::Array{Int64,1} #in case of sugrids, the indices of edges at this level + bbox::Array{Float64,1} #bounding box [xmin xmax ymin ymax] + nnodes::Array{Int64,1} #cached count of nodes foreach cell + + subgrids::Array{Grid,1} #multilevel storage for opimization + function Grid(xnodes::Array,ynodes::Array,edges::AbstractMatrix,nmin=10,spherical::Bool=false) + (nedges,maxNodesPerEdge)=size(edges) + if(maxNodesPerEdge>6) + if(nedges<=6) + edges=collect(edges') + else + println("size(edges)=($(nedges),$(MaxNodesPerEdge))") + end + end + (nedges,maxNodesPerEdge)=size(edges) + nnodes=nodes_per_cell(edges) + bbox_temp=zeros(Float64,4) + if spherical + edge_indices=zeros(Int64,0) + for i=1:nedges + compute_boundingbox!(bbox_temp,xnodes,ynodes,edges,nnodes,i) + if (bbox_temp[2]-bbox_temp[1])<180.0 + push!(edge_indices,i) + end + end + else + edge_indices=collect(1:nedges) + end + bbox=compute_boundingbox(xnodes,ynodes,edges,nnodes) + this=new(xnodes,ynodes,edges,edge_indices,bbox,nnodes,[]) + if nmin>0 + create_node_tree!(this,nmin) + end + return this + end + function Grid(xnodes::Array,ynodes::Array,edges::Array,edge_indices::Array,bbox::Array,nnodes::Array) + new(xnodes,ynodes,edges,edge_indices,bbox,nnodes,[]) + end +end +end #ifdef + +mutable struct Interpolator + grids::Array{Grid,1} #grids for each domain + outx_hash::UInt64 #hash code of previous output grid + outy_hash::UInt64 #for caching the interpolation indices and weights + cached_indices::Array{Any,1} #indices of previous interpolation + function Interpolator() + new([],hash([]),hash([]),[]) + end +end + +# +# High-level functions +# + +function add_grid!(interp::Interpolator,grid::Grid) + push!(interp.grids,grid) +end + +function get_bbox(interp::Interpolator) + bbox=copy(interp.grids[1].bbox) + for counter=2:length(interp.grids) + temp_bbox=interp.grids[counter].bbox + 
bbox[1]=min(bbox[1],temp_bbox[1]) #xmin
        bbox[2]=max(bbox[2],temp_bbox[2]) #xmax
        bbox[3]=min(bbox[3],temp_bbox[3]) #ymin
        bbox[4]=max(bbox[4],temp_bbox[4]) #ymax
    end
    return bbox
end

"""
    outvalues = interpolate(interp,xpoints,ypoints,invalues)

Interpolate per-domain cell values onto the tensor grid xpoints x ypoints.
Cell-search results are cached between calls with an identical output grid.
"""
function interpolate(interp::Interpolator,xpoints,ypoints,invalues::Array)
    #check for multiple or single domain
    domain_values=invalues
    if(typeof(invalues).parameters[1] <: Number)
        #input as single domain
        # BUGFIX: used to test interp.xnodes, which is not a field of Interpolator
        if length(interp.grids)>1
            error("Input values were given for single domain to multi-domain interpolator")
        end
        # BUGFIX: the wrapped array was assigned but never used below
        domain_values=[invalues]
    end
    #compute hash and see if we can reuse interpolation weights
    outx_hash=hash(xpoints)
    outy_hash=hash(ypoints)
    if (interp.outx_hash!=outx_hash)||(interp.outy_hash!=outy_hash)
        #new output grid, so compute weights
        println("compute new interpolation weights")
        interp.outx_hash=outx_hash
        interp.outy_hash=outy_hash
        cached_indices=[]
        for i=1:length(interp.grids)
            println("grid $(i)")
            @time cells=find_cells(xpoints,ypoints,interp.grids[i])
            push!(cached_indices,cells)
        end
        interp.cached_indices=cached_indices #store to cache for next call to this function
    else
        if debuglevel>1
            println("use cached interpolation weights")
        end
        cached_indices=interp.cached_indices #retrieve from cache
    end
    outvalues=zeros(Float64,size(cached_indices[1]))
    for i=1:length(cached_indices) #loop over grids
        get_values_by_cells!(outvalues,cached_indices[i],domain_values[i])
    end
    return outvalues
end

#
# low-level functions
#

"""
    nnodes = nodes_per_cell(edges)

Each row of edges contains the node numbers of one cell, padded with negative
numbers, eg [1,2,3,-1] for triangle 1,2,3. Returns the node count per cell.
"""
function nodes_per_cell(edges::AbstractMatrix)
    n,m = size(edges)
    nnodes=m*ones(Int64,n)
    for i=1:n
        for j=m:-1:3 #assume each cell has at least two nodes
            if(edges[i,j]<0)
                nnodes[i]=(j-1)
            end
        end
    end
    return nnodes
end

# compute winding number
# for simple counter clockwise polygons this gives 1 if point is inside and 0 outside
# NOTE(review): the crossing tests below were garbled in the patch (text between
# '<' and '>' was stripped); they are reconstructed from the standard
# winding-number crossing rules — please verify against the original source.
function winding_number(xpoint::AbstractFloat,ypoint::AbstractFloat,xspolygon::Array,yspolygon::Array)
    wn=0
    xprev=xspolygon[end]
    yprev=yspolygon[end]
    for i=1:length(xspolygon)
        xthis=xspolygon[i]
        ythis=yspolygon[i]
        # look for crossing of y=ypoint and x>xpoint, ie east of point
        if((yprev<ypoint)&&(ythis>=ypoint))
            #check for northward crossing east of point
            s=(ypoint-yprev)/(ythis-yprev)
            x=xprev+s*(xthis-xprev)
            if x>xpoint
                wn+=1
            end
        end
        if((yprev>=ypoint)&&(ythis<ypoint))
            #check for southward crossing east of point
            s=(ypoint-yprev)/(ythis-yprev)
            x=xprev+s*(xthis-xprev)
            if x>xpoint
                wn-=1
            end
        end
        xprev=xthis
        yprev=ythis
    end
    return wn
end

"""
    icell = find_first_cell(xpoint,ypoint,xnodes,ynodes,edges,nnodes)

For point (xpoint,ypoint) find the cell containing it. Stops at the first hit
and returns the row number of the edges array; -1 if no cell is found.
TODO start with linear search. Make more efficient later
NOTE(review): crossing tests reconstructed as in winding_number above.
"""
function find_first_cell(xpoint,ypoint,xnodes,ynodes,edges,nnodes)
    ##println("x=$(xpoint) y=$(ypoint) ")
    m,n=size(edges)
    for i=1:m
        wn=0
        xprev=xnodes[edges[i,nnodes[i]]]
        yprev=ynodes[edges[i,nnodes[i]]]
        for j=1:nnodes[i]
            xthis=xnodes[edges[i,j]]
            ythis=ynodes[edges[i,j]]
            # look for crossing of y=ypoint and x>xpoint, ie east of point
            if((yprev<ypoint)&&(ythis>=ypoint))
                #check for northward crossing east of point
                s=(ypoint-yprev)/(ythis-yprev)
                x=xprev+s*(xthis-xprev)
                if x>xpoint
                    wn+=1
                end
            end
            if((yprev>=ypoint)&&(ythis<ypoint))
                #check for southward crossing east of point
                s=(ypoint-yprev)/(ythis-yprev)
                x=xprev+s*(xthis-xprev)
                if x>xpoint
                    wn-=1
                end
            end
            xprev=xthis
            yprev=ythis
        end
        #wn now contains winding number; nonzero means inside
        if wn!=0
            return i
        end
    end
    return -1
end

"""
    get_values_by_cells!(outvalues,cells,invalues)

Copy invalues[cell] into outvalues for every found cell (>0); entries for
cells that were not found (<=0) are left untouched.
"""
function get_values_by_cells!(outvalues,cells,invalues)
    for i=1:length(cells)
        if(cells[i]>0)
            outvalues[i]=invalues[cells[i]]
        end
    end
end

function compute_boundingbox(xnodes,ynodes,edges,nnodes)
    #returns array with min(x),max(x),min(y),max(y)
    #only considers nodes that are part of a listed cell
    xmin=xnodes[edges[1,1]]
    xmax=xnodes[edges[1,1]]
ymin=ynodes[edges[1,1]]
    ymax=ynodes[edges[1,1]]
    m,n = size(edges)
    for i=1:m
        for j=1:nnodes[i]
            xmin=min(xmin,xnodes[edges[i,j]])
            xmax=max(xmax,xnodes[edges[i,j]])
            ymin=min(ymin,ynodes[edges[i,j]])
            ymax=max(ymax,ynodes[edges[i,j]])
        end
    end
    return [xmin,xmax,ymin,ymax]
end

"""
    compute_boundingbox!(bbox,xnodes,ynodes,edges,nnodes,edge_index)

Fill bbox with [xmin,xmax,ymin,ymax] of the single cell in row edge_index.
"""
function compute_boundingbox!(bbox,xnodes,ynodes,edges,nnodes,edge_index)
    i=edge_index
    xmin=xnodes[edges[i,1]]
    xmax=xnodes[edges[i,1]]
    ymin=ynodes[edges[i,1]]
    ymax=ynodes[edges[i,1]]
    for j=1:nnodes[i]
        xmin=min(xmin,xnodes[edges[i,j]])
        xmax=max(xmax,xnodes[edges[i,j]])
        ymin=min(ymin,ynodes[edges[i,j]])
        ymax=max(ymax,ynodes[edges[i,j]])
    end
    bbox[1]=xmin
    bbox[2]=xmax
    bbox[3]=ymin
    bbox[4]=ymax
end

"""
    update_boundingbox!(bbox,xnodes,ynodes,edges,nnodes,edge_index)

Grow bbox to also contain the cell in row edge_index of edges.
"""
function update_boundingbox!(bbox,xnodes,ynodes,edges,nnodes,edge_index)
    i=edge_index
    for j=1:nnodes[i]
        bbox[1]=min(bbox[1],xnodes[edges[i,j]])
        bbox[2]=max(bbox[2],xnodes[edges[i,j]])
        bbox[3]=min(bbox[3],ynodes[edges[i,j]])
        bbox[4]=max(bbox[4],ynodes[edges[i,j]])
    end
end

"""
    update_boundingbox!(bbox1,bbox2)

Grow bbox1 to also contain bbox2.
"""
function update_boundingbox!(bbox1,bbox2)
    bbox1[1]=min(bbox1[1],bbox2[1])
    bbox1[2]=max(bbox1[2],bbox2[2])
    bbox1[3]=min(bbox1[3],bbox2[3])
    bbox1[4]=max(bbox1[4],bbox2[4])
end

"""
    inside = in_bbox(xpoint,ypoint,bbox)

True when the point lies inside or on the edge of bbox=[xmin,xmax,ymin,ymax].
"""
function in_bbox(xpoint,ypoint,bbox::Array)
    return ( (xpoint>=bbox[1]) && (xpoint<=bbox[2]) && (ypoint>=bbox[3]) && (ypoint<=bbox[4]) )
end

#
# medium-level functions
#

"""
    icell = find_first_cell(xpoint,ypoint,grid)

For point (xpoint,ypoint) find the cell containing it, checking the bounding
box and subgrid tree before the local cells. Stops at the first hit and
returns the row number in the edges array; -1 if no cell is found.
TODO start with linear search. Make more efficient later
NOTE(review): the winding-number crossing tests were garbled in the patch
(text between '<' and '>' stripped) and have been reconstructed from the
standard crossing rules — please verify against the original source.
"""
function find_first_cell(xpoint,ypoint,grid::Grid)
    ##println("x=$(xpoint) y=$(ypoint)")
    if !in_bbox(xpoint,ypoint,grid.bbox)
        return -1 #no chance to find anything outside bbox
    end
    # first check subgrids for a match
    for igrid=1:length(grid.subgrids)
        icell=find_first_cell(xpoint,ypoint,grid.subgrids[igrid])
        if icell>0
            return icell
        end
    end
    #finally check local cells
    m=length(grid.edge_indices)
    wn=0
    s=0.
    x=0.
    for i=1:m
        iedge=grid.edge_indices[i]
        wn=0
        xprev=grid.xnodes[grid.edges[iedge,grid.nnodes[iedge]]]
        yprev=grid.ynodes[grid.edges[iedge,grid.nnodes[iedge]]]
        for j=1:grid.nnodes[iedge]
            xthis=grid.xnodes[grid.edges[iedge,j]]
            ythis=grid.ynodes[grid.edges[iedge,j]]
            # look for crossing of y=ypoint and x>xpoint, ie east of point
            if((yprev<ypoint)&&(ythis>=ypoint))
                #check for northward crossing east of point
                s=(ypoint-yprev)/(ythis-yprev)
                x=xprev+s*(xthis-xprev)
                if x>xpoint
                    wn+=1
                end
            end
            if((yprev>=ypoint)&&(ythis<ypoint))
                #check for southward crossing east of point
                s=(ypoint-yprev)/(ythis-yprev)
                x=xprev+s*(xthis-xprev)
                if x>xpoint
                    wn-=1
                end
            end
            xprev=xthis
            yprev=ythis
        end
        #wn now contains winding number
        ##println("cell $(iedge) winding-number $(wn)")
        if wn!=0
            return iedge
        end
    end
    return -1
end


"""
    cells = find_cells(xpoints,ypoints,grid)

Find cell numbers for each point of the tensor grid xpoints x ypoints.
"""
function find_cells(xpoints,ypoints,grid::Grid)
    m=length(xpoints)
    n=length(ypoints)
    cells=zeros(Int64,m,n)
    for i=1:m
        for j=1:n
            cells[i,j]=find_first_cell(xpoints[i],ypoints[j],grid)
        end
    end
    return cells
end

function create_node_tree!(grid::Grid,nmin=100)
    if length(grid.edge_indices)>nmin
        # recursively split grid into subgrids
        n=length(grid.edge_indices)
        #println("n=$(n)")
        edge_indices1=zeros(Int64,n)
        edge_indices2=zeros(Int64,n)
        bbox1=zeros(Float64,4)
        bbox2=zeros(Float64,4)
        n1=0
        n2=0
        edge_indices=zeros(Int64,n)
bbox=copy(grid.bbox) + n=0 + #In which direction will we subdivide the box + if( (bbox[2]-bbox[1]) > (bbox[4]-bbox[3]) ) + # split x-direction + xsplit_low =bbox[1]+0.45*(bbox[2]-bbox[1]) + xsplit_high=bbox[1]+0.55*(bbox[2]-bbox[1]) + #println("x-split $(xsplit_low) $(xsplit_high)") + bbox_temp=zeros(Float64,4) + for i=1:length(grid.edge_indices) + iedge=grid.edge_indices[i] + #println("edge $(iedge)") + compute_boundingbox!(bbox_temp,grid.xnodes,grid.ynodes,grid.edges,grid.nnodes,iedge) + if (bbox_temp[2]<=xsplit_high ) #left + #println("left") + if n1==0 + bbox1=copy(bbox_temp) + else + update_boundingbox!(bbox1,bbox_temp) + end + n1+=1 + edge_indices1[n1]=iedge + elseif (bbox_temp[1] >= xsplit_low ) #right + #println("right") + if n2==0 + bbox2=copy(bbox_temp) + else + update_boundingbox!(bbox2,bbox_temp) + end + n2+=1 + edge_indices2[n2]=iedge + else #keep at this level + #println("between left and right") + if n==0 + bbox=copy(bbox_temp) + else + update_boundingbox!(bbox,bbox_temp) + end + n+=1 + edge_indices[n]=iedge + end + end + else + # split y-direction + ysplit_low =bbox[3]+0.45*(bbox[4]-bbox[3]) + ysplit_high=bbox[3]+0.55*(bbox[4]-bbox[3]) + #println("y-split $(ysplit_low) $(ysplit_high)") + bbox_temp=zeros(Float64,4) + for i=1:length(grid.edge_indices) + iedge=grid.edge_indices[i] + #print("edge $(iedge)") + compute_boundingbox!(bbox_temp,grid.xnodes,grid.ynodes,grid.edges,grid.nnodes,iedge) + #println(bbox_temp) + if (bbox_temp[4]<=ysplit_high ) #bottom + #println("bottom") + if n1==0 + bbox1=copy(bbox_temp) + else + update_boundingbox!(bbox1,bbox_temp) + end + n1+=1 + edge_indices1[n1]=iedge + elseif (bbox_temp[3] >= ysplit_low ) #top + #println("top") + if n2==0 + bbox2=copy(bbox_temp) + else + update_boundingbox!(bbox2,bbox_temp) + end + n2+=1 + edge_indices2[n2]=iedge + else #keep at this level + #println("between top and bottom") + if n==0 + bbox=copy(bbox_temp) + else + update_boundingbox!(bbox,bbox_temp) + end + n+=1 + edge_indices[n]=iedge + 
end + end + end + #println("parent,child1,child2=$(n),$(n1),$(n2)") + #dump_array(bbox1,"bbox1") + #dump_array(bbox2,"bbox2") + #child 1 + resize!(edge_indices1,n1) + #dump_array(edge_indices1,"edge_indices1") + if(n1>0) + grid1=Grid(grid.xnodes,grid.ynodes,grid.edges,edge_indices1,bbox1,grid.nnodes) + create_node_tree!(grid1,nmin) + push!(grid.subgrids,grid1) + end + #child 2 + resize!(edge_indices2,n2) + #dump_array(edge_indices2,"edge_indices2") + if(n2>0) + grid2=Grid(grid.xnodes,grid.ynodes,grid.edges,edge_indices2,bbox2,grid.nnodes) + create_node_tree!(grid2,nmin) + push!(grid.subgrids,grid2) + end + #parent + #dump_array(edge_indices,"edge_indices") + resize!(edge_indices,n) + grid.edge_indices=edge_indices + end +end + +import Base.dump + +function dump(grid::Grid) + dump_array(grid.xnodes,"xnodes") + dump_array(grid.ynodes,"ynodes") + print(" edges[] = ") + dump(grid.edges) + println("--- subgrid ---") + print("000 bbox=") + show(grid.bbox) + dump_array(grid.edge_indices," edge_indices") + for i=1:length(grid.subgrids) + dump_subgrid(grid.subgrids[i],1) + end +end + +function dump_subgrid(grid::Grid,level) + print("$(level) bbox=") + show(grid.bbox) + dump_array(grid.edge_indices," edge_indices") + for i=1:length(grid.subgrids) + dump_subgrid(grid.subgrids[i],level+1) + end +end + +function dump_array(a,name="") + s=size(a) + if(length(s)==1) + print("$(name)[1:$(length(a))]=") + if(length(a)>10) + println("[$(a[1]),$(a[2]),$(a[3]),$(a[4]),$(a[5]),..., + $(a[end-4]),$(a[end-3]),$(a[end-2]),$(a[end-1]),$(a[end])]") + elseif(length(a)==0) + println("[]") + else + show(a) + println("") + end + end +end + +""" + cell_index = find_index(interp, xpoint, ypoint) +Find the domain and index of the cell within that domain, eg the result [2,1234] +indicates the cell 1234 in the snd domain. 
"""
function find_index(interp::Interpolator, xpoint, ypoint)
    result = [-1 -1] # 1x2 matrix: [domain cell]; [-1 -1] means "not found"
    for idom = 1:length(interp.grids)
        icell = find_first_cell(xpoint, ypoint, interp.grids[idom])
        if icell > 0
            result[1] = idom
            result[2] = icell
            break
        end
    end
    return result
end

"""
    waterlevel_at_point = apply_index(index,map_slice,9999.0)
Get the number at domain and index (given by index). The index is often the result of
the function find_index. If the cell index is [-1,-1] then the point is considered to
be outside the area covered by the cells, eg on land, and then a default value is returned.
"""
function apply_index(index, map_slice, default_value = 0.0)
    return index[1] > 0 ? map_slice[index[1]][index[2]] : default_value
end
diff --git a/case_gloparts/Particles.jl/src/wms_client.jl b/case_gloparts/Particles.jl/src/wms_client.jl
new file mode 100644
index 0000000..c006c73
--- /dev/null
+++ b/case_gloparts/Particles.jl/src/wms_client.jl
@@ -0,0 +1,158 @@
Nothing

using HTTP
using Base64
using Dates
using Plots
using FileIO
using ImageMagick
#using TestImages

"""
Info about some WMS servers. Some of the info could be collected using the GetCapabilities request
to the server. For now this was done manually.
+""" +KnownServers=Dict() +#emodnet-bathymetry +#figure out more details (manually for now) +# https://ows.emodnet-bathymetry.eu/wms/service?SERVICE=WMS&VERSION=1.1.1&request=getcapabilities +emo=Dict("scheme" => "https", + "host" => "ows.emodnet-bathymetry.eu", + "path" => "/wms/service", + "service" => "WMS", + "version" => "1.1.1", + "layers" => ["emodnet:mean_atlas_land","emodnet:mean_multicolour","emodnet:mean_rainbowcolour"], + "styles" => ["mean_atlas_land","mean_multicolur","mean_rainbowcolour"], + "default_layer" => 3, + "srs" => "epsg:4326", + "format" => "image/png", + "extent" => [-36.0,15.0,43.0,90.0]) +KnownServers["emodnet-bathymetry"]=emo +#open streetmap OSM +# http://ows.terrestris.de/osm/service?SERVICE=WMS&VERSION=1.1.1&request=getcapabilities +osm=Dict("scheme" => "https", + "host" => "ows.terrestris.de", + "path" => "/osm/service", + "service" => "WMS", + "version" => "1.1.1", + "layers" => ["OSM-WMS","TOPO-WMS","TOPO-OSM-WMS","SRTM30-Colored-Hillshade"], + "styles" => ["default","default","default","default"], + "default_layer" => 1, + "srs" => "epsg:4326", + "format" => "image/png", + "extent" => [-180.0,-90.0,180.0,90.0]) +KnownServers["open-streetmap"]=osm +# GEBCO https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv? +# polar view https://www.gebco.net/data_and_products/gebco_web_services/north_polar_view_wms/mapserv? 
+gebco=Dict("scheme" => "https", + "host" => "www.gebco.net", + "path" => "/data_and_products/gebco_web_services/web_map_service/mapserv", + "service" => "WMS", + "version" => "1.1.1", + "layers" => ["GEBCO_LATEST"], + "styles" => ["default"], + "default_layer" => 1, + "srs" => "epsg:4326", + "format" => "image/png", + "extent" => [-180.0,-90.0,180.0,90.0]) +KnownServers["gebco"]=gebco + +mutable struct WmsServer + scheme::String + host::String + path::String + service::String + version::String + layer::String #selected layer + style::String + format::String + srs::String + extent::Array{Float64,1} + verbose::Int64 + """ create """ + function WmsServer() #provide a default map for convenience + label="emodnet-bathymetry" + return WmsServer(label) + end + """ + s=WmsServer("open-streetmap",2) + Create new object representing the WmsServer locally. Labels are from the keys in the KnownServers. + layer_number<1 denotes the default layer. + """ + function WmsServer(label::String,layer_number=-1) + sdata=KnownServers[label] + scheme=sdata["scheme"] + host=sdata["host"] + path=sdata["path"] + service=sdata["service"] + version=sdata["version"] + ilayer=sdata["default_layer"] + if layer_number>0 + ilayer=layer_number + end + layer=sdata["layers"][ilayer] + style=sdata["styles"][ilayer] + format=sdata["format"] + srs=sdata["srs"] + extent=copy(sdata["extent"]) + verbose=1 + return new(scheme,host,path,service,version,layer,style,format,srs,extent,verbose) + end +end +""" + img=get_map(wms_server,bbox) + +Download a wms map for a given region and load it as an image. 
+bbox contains xmin ymin xmax ymax +""" +function get_map(wms_server,boundingbox,width=1200,height=800) + query=Dict{String,String}() + query["SERVICE"]=wms_server.service + query["VERSION"]=wms_server.version + query["REQUEST"]="GetMap" + query["width"]="$(width)" + query["height"]="$(height)" + query["layers"]=wms_server.layer + query["styles"]=wms_server.style + query["format"]=wms_server.format + query["SRS"]=wms_server.srs + b=boundingbox + query["bbox"]="$(b[1]),$(b[2]),$(b[3]),$(b[4])" + #println(query) + url=HTTP.URI(; scheme=wms_server.scheme, host=wms_server.host, path=wms_server.path, query=query) + println(url) + #handle cache + if !isdir(".cache") + println("Creating cache folder .cache") + mkdir(".cache") + else + println("Cache folder .cache already exists") + end + url_hash=hash(url) + filename=joinpath(".cache","$(wms_server.host)_$(url_hash).png") + if !isfile(filename) + println("Cache file $(filename) does not exist yet. Download new map.") + HTTP.download(string(url),filename ; verbose=wms_server.verbose) + else + println("Cache file $(filename) already exists. Using cached file") + end + img = load(filename) + return img +end + + +""" + plot_image(image,bbox) + +Plot and image scaled to a given range, such that other elements added to the graph +will appear in the right position. 
+image +bbox contains xmin ymin xmax ymax +""" +function plot_image(image,boundbox) + #f=plot(range(boundbox[1], boundbox[3],length=size(image,1)), range(boundbox[2], boundbox[4], length=size(image,2)), image[end:-1:1,:], size=(size(image,2),size(image,1)),yflip=false) + f=plot(range(boundbox[1], stop=boundbox[3],length=size(image,1)), range(boundbox[2], stop=boundbox[4], length=size(image,2)), image[end:-1:1,:], size=(size(image,2),size(image,1)),yflip=false) + return f +end + +Nothing diff --git a/case_gloparts/README.txt b/case_gloparts/README.txt new file mode 100644 index 0000000..ea70052 --- /dev/null +++ b/case_gloparts/README.txt @@ -0,0 +1,4 @@ +12 Feb 2024 - J. Groenenboom + +The folders 'workDir' and 'Particles.jl' contain the scripts as they are currently used within GLOPARTS. +The folders 'test' and 'test_data' contain scripts and files for functionalities related to GLOPARTS (therefore, they are not included in the general tests). diff --git a/case_gloparts/test/test_cmems_grid.jl b/case_gloparts/test/test_cmems_grid.jl new file mode 100644 index 0000000..79f5850 --- /dev/null +++ b/case_gloparts/test/test_cmems_grid.jl @@ -0,0 +1,96 @@ +# Interact with CMEMS netcdf output map files +# +using Dates + +# +# test +# + +function test1() + #init + cmems_data = CmemsData("../test_data","cmems_20140301_06.nc") + + ice1=load_map_slice(cmems_data,"siconc",1) # sea-ice concentration + @test size(ice1)==(481,181) + @test isnan(ice1[1,1]) + @test isapprox(ice1[1,end],0.9666,atol=0.001) + + t0=get_reftime(cmems_data) + @test t0==DateTime(2014,3,1) + + times=get_times(cmems_data,t0) + @test times[1]≈43200.0 + @test times[2]≈129600.0 + + t2=as_DateTime(cmems_data,t0,times[2]) + @test t2==DateTime("2014-03-02T12:00:00") + + + u=initialize_interpolation(cmems_data,"uo",t0,NaN) #water velocity x-dir + u1=u(20.0,75.0,0.0,43200.0) + @test u1≈-0.04763681870922009 + u2=u(20.0,75.0,0.0,129600.0) + @test u2≈-0.037204620036251405 + 
v=initialize_interpolation(cmems_data,"vo",t0,NaN) #water velocity y-dir + v1=v(20.0,75.0,0.0,43200.0) + @test v1≈0.04879613983597007 +end + +function test2() + #init + cmems_data = CmemsData("../test_data","u0_2021-10-29_00-00-00_2021-11-07_00-00-00.nc") + + u1=load_map_slice(cmems_data,"uo",1) # u-velocity + @test size(u1)==(733,277) + @test isnan(u1[1,1]) + @test isapprox(u1[100,100],0.21790215,atol=0.001) + + t0=get_reftime(cmems_data) + @test t0==DateTime(2021,10,29) + + times=get_times(cmems_data,t0) + @test times[1]≈1800.0 + @test times[2]≈5400.0 + + t2=as_DateTime(cmems_data,t0,times[2]) + @test t2==DateTime("2021-10-29T01:30:00") + + + u=initialize_interpolation(cmems_data,"uo",t0,NaN) #water velocity x-dir + u1=u(-73.0,35.0,0.0,1800.0) + @test u1≈0.04947685987763097 + u2=u(-73.0,35.0,0.0,3600.0) + @test u2≈0.037575478067336805 +end + +function test3() + #init + cmems_datas = CmemsData("../test_data", r"cmems_map_u_.+_time..nc"; lon = "x", lat = "y") # Regex + @test length(cmems_datas) == 2 + cmems_datas = CmemsData("../test_data", "cmems_map_u_*_time*.nc"; lon = "x", lat = "y") # Glob + @test length(cmems_datas) == 2 + cmems_data = cmems_datas[1] + + t0 = get_reftime(cmems_data) + @test t0 == DateTime(2022,05,20) + + times = get_times(cmems_data,t0) + @test times[1] ≈ 0.0 + @test times[2] ≈ 3600.0 + + t2 = as_DateTime(cmems_data, t0, times[2]) + @test t2 == DateTime("2022-05-20T01:00:00") + + + u = initialize_interpolation(cmems_datas, "uo", t0, NaN) #water velocity x-dir + u1 = u(0.5, 52, 0.0, 1800.0) # only available in 'time1'-file + @test u1 ≈ -1 + u1 = u(0.5, 52, 0.0, 21600.0) # available in both 'time1'- and 'time2'-file (so pick 'time2'-file) + @test u1 ≈ 1 + u1 = u(0.5, 52, 0.0, 90000.0) # only available in 'time2'-file + @test u1 ≈ 1 +end + +test1() +test2() +test3() diff --git a/case_gloparts/test/test_dflow.jl b/case_gloparts/test/test_dflow.jl new file mode 100644 index 0000000..f7907b4 --- /dev/null +++ b/case_gloparts/test/test_dflow.jl @@ 
-0,0 +1,162 @@ +# Interact with dflow netcdf output map files +# +using Dates + +# +# test +# + +function test1() + #init + dflow_map=load_nc_info("../test_data",r"estuary_...._map.nc") + @test length(dflow_map)==2 + @test size(dflow_map[2].vars["s1"])==(103,25) + interp=load_dflow_grid(dflow_map,50,false) + @test length(interp.grids)==2 + + #interpolate a field to a regular grid + sealevel=load_nc_map_slice(dflow_map,"s1",10) + @test length(sealevel)==2 + @test length(sealevel[1])==103 + @test length(sealevel[2])==103 + @test sealevel[2][12]==-0.3979754473027684 + + u_velocity=load_nc_map_slice(dflow_map,"ucx",10) + @test length(u_velocity)==2 + @test length(u_velocity[1])==103 + @test length(u_velocity[2])==103 + @test u_velocity[1][103]==-0.2017744781187035 + + xpoints=collect(range(0.,stop=100000.,length=300)) + ypoints=collect(range(0.,stop=500.,length=100)) + sealevel_interp=interpolate(interp,xpoints,ypoints,sealevel) + @test size(sealevel_interp)==(300,100) + @test sealevel_interp[23,48]==-0.17045316506644842 + + u_velocity_interp=interpolate(interp,xpoints,ypoints,u_velocity) + @test size(u_velocity_interp)==(300,100) + #heatmap(xpoints,ypoints,u_velocity_interp') + # interpolate for one point only + ind=find_index(interp,10000.0,200.0) + @test ind==[2 84] + ux=apply_index(ind,u_velocity,-9999.) + @test ux==-0.14349077430813084 + + # u,v interpolation functions + t0=get_reftime(dflow_map) + @test t0==DateTime(1991,1,1) + u1,v1=initialize_interpolation(dflow_map,interp,t0) + u_value=u1(100.0,100.0,0.0,0.0) + #@test u_value==-0.9088566953087289 + @test u_value==-0.0 #TODO Check later. Is this really true? 
+end + +# this test needs files that do not fit in github +# only run if the files exist +function test2() + #init + dflow_map=load_nc_info("../test_data",r"DCSM-FM_0_5nm_...._map.nc") + @test length(dflow_map)==20 + @test size(dflow_map[2].vars["mesh2d_s1"])==(31553,170) + interp=load_dflow_grid(dflow_map,50,false) + @test length(interp.grids)==20 + + ##interpolate a field to a regular grid + #sealevel=load_nc_map_slice(dflow_map,"mesh2d_s1",10) + #xpoints=collect(range(-15.0,stop=13.0,length=1200)) + #ypoints=collect(range(43.0,stop=64.0,length=1000)) + #sealevel_interp=interpolate(interp,xpoints,ypoints,sealevel) + #Plots.default(:size,[1200,1000]) + #heatmap(xpoints,ypoints,sealevel_interp', clims=(-2.0,2.0)) + + # u,v interpolation functions + t0=get_reftime(dflow_map) + @test t0==DateTime(2012,12,22) + u2,v2=initialize_interpolation(dflow_map,interp,t0) + for istep=1:5 + u_value=u2(1.0,51.0,0.0,864000.0+3600.0*istep) + println("$(istep) u=$(u_value)") + if istep==5 + @test u_value==0.04639523554494074 + end + end +end + +function test3() + #init + dflow_map=load_nc_info("../test_data","dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000_map.nc") + @test length(dflow_map)==1 + dflow_map=load_nc_info("../test_data","dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000_map.nc") + @test length(dflow_map)==1 + dflow_map=load_nc_info("../test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/",r"cb_2d_...._map.nc") + @test length(dflow_map)==2 + dflow_map=load_nc_info("../test_data","dfm_multiple_runs_and_domains/*/DFM_OUTPUT_cb_2d/cb_2d_*_map.nc") + @test length(dflow_map)==4 + @test size(dflow_map[1].vars["mesh2d_ucx"])==(317,25) + interp=load_dflow_grid(dflow_map,50,true) + @test length(interp.grids)==2 + + # u,v interpolation functions + t0=get_reftime(dflow_map) + @test t0==DateTime(2001,1,1) + u1,v1=initialize_interpolation(dflow_map,interp,t0) + u_value=u1(3.33, 47.99, 0.0, 1800.0) + @test 
u_value==0.0435357731534879 + u_value=u1(3.33, 47.99, 0.0, 86399.0) + @test u_value==-0.048490442244461425 + u_value=u1(3.33, 47.99, 0.0, 43200.0) + @test u_value==0 + u_value=u1(3.33, 47.99, 0.0, 43199.0) + @test u_value==0.04905390667374792 +end + +function test4() + # open file for 3d lock exchange + dflow_map=load_nc_info("../test_data","locxxz_fullgrid_map.nc") + @test length(dflow_map)==1 + + #load and inspect a layer of salinity + ilayer=5 + itime=10 + sal=load_nc_map_slice(dflow_map,"mesh2d_sa1",itime,ilayer) + @test length(sal)==1 + @test length(sal[1])==400 + @test sal[1][12]≈5.000000000000 + # inspect vertical eddy viscosity + ilayer=2 + itime=10 + nu=load_nc_map_slice(dflow_map,"mesh2d_vicwwu",itime,ilayer) + @test length(nu)==1 + @test length(nu[1])==1201 + @test nu[1][100]≈2.181251740966235e-5 + # inspect vertical eddy viscosity at FACES + ilayer=2 + itime=10 + nu_face=load_nc_map_slice_at_faces(dflow_map,"mesh2d_vicwwu",itime,ilayer) + @test length(nu_face)==1 + @test length(nu_face[1])==400 + @test nu_face[1][100]≈2.2144234788637896e-5 + #load and inspect a layer of x_velocity + ilayer=1 + itime=10 + u=load_nc_map_slice(dflow_map,"mesh2d_ucx",itime,ilayer) + @test length(u)==1 + @test length(u[1])==400 + @test u[1][100]≈0.6020406834128101 +end + +# +# run tests +# + +test1() + +#test2 needs large input files that are not present in the repository. 
+#only run tests if the files have been added (manually for now) +if isfile(joinpath("../test_data","DCSM-FM_0_5nm_0000_map.nc")) + test2() +end + +test3() + +test4() diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d.dia b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d.dia similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d.dia rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d.dia diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000.dia b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000.dia similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000.dia rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000.dia diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000_his.nc b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000_his.nc similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000_his.nc rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000_his.nc diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000_map.nc b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000_map.nc similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000_map.nc rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000_map.nc diff --git 
a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0001.dia b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0001.dia similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0001.dia rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0001.dia diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0001_map.nc b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0001_map.nc similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0001_map.nc rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0001_map.nc diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_interpreted_idomain_bendcurv_net.nc b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_interpreted_idomain_bendcurv_net.nc similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_interpreted_idomain_bendcurv_net.nc rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_interpreted_idomain_bendcurv_net.nc diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/Velocity.bc b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/Velocity.bc similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/Velocity.bc rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/Velocity.bc diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/bendcurv_0000_net.nc b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/bendcurv_0000_net.nc similarity index 100% rename from 
test_data/dfm_multiple_runs_and_domains/01_flow_to_right/bendcurv_0000_net.nc rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/bendcurv_0000_net.nc diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/bendcurv_0001_net.nc b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/bendcurv_0001_net.nc similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/bendcurv_0001_net.nc rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/bendcurv_0001_net.nc diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/bendcurv_net.nc b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/bendcurv_net.nc similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/bendcurv_net.nc rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/bendcurv_net.nc diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb3d-1_bnd.ext b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb3d-1_bnd.ext similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb3d-1_bnd.ext rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb3d-1_bnd.ext diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb3d-1_obs.xyn b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb3d-1_obs.xyn similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb3d-1_obs.xyn rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb3d-1_obs.xyn diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb3d-1_thd.pli b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb3d-1_thd.pli similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb3d-1_thd.pli rename to 
case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb3d-1_thd.pli diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d.mdu b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d.mdu similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d.mdu rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d.mdu diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0000.cache b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0000.cache similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0000.cache rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0000.cache diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0000.mdu b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0000.mdu similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0000.mdu rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0000.mdu diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0001.cache b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0001.cache similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0001.cache rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0001.cache diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0001.mdu b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0001.mdu similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0001.mdu rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/cb_2d_0001.mdu diff --git 
a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/dimr_config.xml b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/dimr_config.xml similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/dimr_config.xml rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/dimr_config.xml diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/inner-east.pli b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/inner-east.pli similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/inner-east.pli rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/inner-east.pli diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/inner-south.pli b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/inner-south.pli similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/inner-south.pli rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/inner-south.pli diff --git a/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/run_parallel.bat b/case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/run_parallel.bat similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/01_flow_to_right/run_parallel.bat rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/01_flow_to_right/run_parallel.bat diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d.dia b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d.dia similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d.dia rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d.dia diff --git 
a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0000.dia b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0000.dia similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0000.dia rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0000.dia diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0000_his.nc b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0000_his.nc similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0000_his.nc rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0000_his.nc diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0000_map.nc b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0000_map.nc similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0000_map.nc rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0000_map.nc diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0001.dia b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0001.dia similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0001.dia rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0001.dia diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0001_map.nc b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0001_map.nc similarity index 
100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0001_map.nc rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_OUTPUT_cb_2d/cb_2d_0001_map.nc diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_interpreted_idomain_bendcurv_net.nc b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_interpreted_idomain_bendcurv_net.nc similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_interpreted_idomain_bendcurv_net.nc rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/DFM_interpreted_idomain_bendcurv_net.nc diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/Velocity.bc b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/Velocity.bc similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/Velocity.bc rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/Velocity.bc diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/bendcurv_0000_net.nc b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/bendcurv_0000_net.nc similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/bendcurv_0000_net.nc rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/bendcurv_0000_net.nc diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/bendcurv_0001_net.nc b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/bendcurv_0001_net.nc similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/bendcurv_0001_net.nc rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/bendcurv_0001_net.nc diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/bendcurv_net.nc 
b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/bendcurv_net.nc similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/bendcurv_net.nc rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/bendcurv_net.nc diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb3d-1_bnd.ext b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb3d-1_bnd.ext similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb3d-1_bnd.ext rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb3d-1_bnd.ext diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb3d-1_obs.xyn b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb3d-1_obs.xyn similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb3d-1_obs.xyn rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb3d-1_obs.xyn diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb3d-1_thd.pli b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb3d-1_thd.pli similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb3d-1_thd.pli rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb3d-1_thd.pli diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d.mdu b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d.mdu similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d.mdu rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d.mdu diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0000.cache b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0000.cache similarity index 100% rename from 
test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0000.cache rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0000.cache diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0000.mdu b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0000.mdu similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0000.mdu rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0000.mdu diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0001.cache b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0001.cache similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0001.cache rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0001.cache diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0001.mdu b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0001.mdu similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0001.mdu rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/cb_2d_0001.mdu diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/dimr_config.xml b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/dimr_config.xml similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/dimr_config.xml rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/dimr_config.xml diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/inner-east.pli b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/inner-east.pli similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/inner-east.pli rename to 
case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/inner-east.pli diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/inner-south.pli b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/inner-south.pli similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/inner-south.pli rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/inner-south.pli diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/run_parallel.bat b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/run_parallel.bat similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/run_parallel.bat rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/run_parallel.bat diff --git a/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/unstruc.dia b/case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/unstruc.dia similarity index 100% rename from test_data/dfm_multiple_runs_and_domains/02_flow_to_left/unstruc.dia rename to case_gloparts/test_data/dfm_multiple_runs_and_domains/02_flow_to_left/unstruc.dia diff --git a/case_gloparts/workDir/config.toml b/case_gloparts/workDir/config.toml new file mode 100644 index 0000000..950cb8a --- /dev/null +++ b/case_gloparts/workDir/config.toml @@ -0,0 +1,55 @@ +id = 1 # label particles with id per source, like 1 or [1, 2] +x = 1.24 # longitude like -69.0 or [-69.0, 70.0] +y = 51.68 # latitude like 35.0 or [35.0, 36.0] + +npartpersource = 10 # number of particles per source +radius = 0.01 # initial spread in degrees +reftime = 2023-09-20T00:00:00Z # reference time for simulation - default tstart=0 +tstart = 0 # start time in seconds since reftime +tend = 259200 # 259200=3 days +dt = 10 # timstep in seconds + +# surface currents +current_filetype="cmems" # valid types are "cmems", "delft3d-fm", "zero" +current_dir = "/opt/fews/datacollect/archive_data" 
+current_x_filename = "CMEMS_currents_u_fc_*.nc" # separate files for u=east and v=north dirs +current_y_filename = "CMEMS_currents_v_fc_*.nc" # Wildcards or regex can be used (e.g. "currents_v_*.nc" or "currents_v_.+.nc") +current_x_var = "x" # variable for x-coordinate (default "longitude") +current_y_var = "y" # variable for y-coordinate (default "latitude") +current_ucx_var = "uo" # variable for flow velocity in x/longitude-direction (default for CMEMS: "uo") +current_ucy_var = "vo" # variable for flow velocity in y/latitude-direction (default for CMEMS: "vo") + +current2_filetype = "delft3d-fm" # valid types are "cmems", "delft3d-fm", "zero" +current2_dir = "/opt/fews/datacollect/archive_data" +current2_filename = "DflowFM_gtsmv4_grid_*_fc.nc" # Wildcards or regex can be used (e.g. "currents_v_*.nc" or "currents_v_.+.nc") +current2_ucx_var = "current_u" # variable for currents in x-coordinate (default "mesh2d_ucx", fews export "currents_u") +current2_ucy_var = "current_v" # variable for currents in y-coordinate (default "mesh2d_ucy", fews export "currents_v") + +# winds +wind_filetype="gfs" # valid types are "gfs", "delft3d-fm", "zero" +wind_dir = "/opt/fews/datacollect/archive_data" +wind_filename = "gfs025_fc.nc" # single for u=east and v=north dirs +wind_x_var = "x" # variable for x-coordinate (default "x") +wind_y_var = "y" # variable for y-coordinate (default "y") +wind_x_wrap = false # shift longitudes from [0 360] to [-180 180] (default false), set to true when using together with cmems [-180, 180] + + +# Defaults + +# Model +K = 5.0 # Eddy diffusion m2/s +leeway_coeff = 0.0144 # 0.016 * 0.9 + + +# write particle tracks to netcdf +write_maps = true # write to netcdf true or false +write_maps_dir = "../workDir/netcdf_output" # relative or absolute folder name +write_maps_filename = "output.nc" # name of file +write_maps_interval = 3600 # write output at tstart:interval:tend in seconds + +# write pictures to disk +plot_maps = false # write plots true or 
false +plot_maps_folder = "../workDir/images_output" # relative or absolute folder name +plot_maps_interval = 3600 # plot maps at tstart:interval:tend in seconds +#plot_background_source = "gebco" # "emodnet-bathymetry", "open-streetmap" or "gebco" +#bbox = [-90.0,20.0,-10.0,50.0] # spatial extent for map plots diff --git a/case_gloparts/workDir/config_template.toml b/case_gloparts/workDir/config_template.toml new file mode 100644 index 0000000..3c6744d --- /dev/null +++ b/case_gloparts/workDir/config_template.toml @@ -0,0 +1,55 @@ +id = 1 # label particles with id per source, like 1 or [1, 2] +x = $Longitude$ # longitude like -69.0 or [-69.0, 70.0] +y = $Latitude$ # latitude like 35.0 or [35.0, 36.0] + +npartpersource = $number_particle$ # number of particles per source +radius = $radius_degree$ # initial spread in degrees +reftime = $release_time$ # reference time for simulation - default tstart=0 +tstart = 0 # start time in seconds since reftime +tend = $simulation_period_seconds$ # 1983600=552hours flow data stops here +dt = 10 # timstep in seconds + +# surface currents +current_filetype="cmems" # valid types are "cmems", "delft3d-fm", "zero" +current_dir = "$FOLDER_CMEMS_MAPS_FOR_PART_ARCHIVE$" +current_x_filename = "CMEMS_currents_u_fc_*.nc" # separate files for u=east and v=north dirs +current_y_filename = "CMEMS_currents_v_fc_*.nc" # Wildcards or regex can be used (e.g. 
"currents_v_*.nc" or "currents_v_.+.nc") +current_x_var = "x" # variable for x-coordinate (default "longitude") +current_y_var = "y" # variable for y-coordinate (default "latitude") +current_ucx_var = "uo" # variable for flow velocity in x/longitude-direction (default for CMEMS: "uo") +current_ucy_var = "vo" # variable for flow velocity in y/latitude-direction (default for CMEMS: "vo") + +current2_filetype = "delft3d-fm" # valid types are "cmems", "delft3d-fm", "zero" +current2_dir = "$FOLDER_DELFT3DFM_MAPS_FOR_PART_ARCHIVE$" +current2_filename = "DflowFM_gtsmv4_grid_*_fc.nc" # Wildcards or regex can be used (e.g. "currents_v_*.nc" or "currents_v_.+.nc") +current2_ucx_var = "current_u" # variable for currents in x-coordinate (default "mesh2d_ucx", fews export "currents_u") +current2_ucy_var = "current_v" # variable for currents in y-coordinate (default "mesh2d_ucy", fews export "currents_v") + +# winds +wind_filetype="gfs" # valid types are "gfs", "delft3d-fm", "zero" +wind_dir = "$FOLDER_GFS_MAPS_FOR_PART_ARCHIVE$" +wind_filename = "gfs_winds.nc" # single for u=east and v=north dirs +wind_x_var = "x" # variable for x-coordinate (default "x") +wind_y_var = "y" # variable for y-coordinate (default "y") +wind_x_wrap = false # shift longitudes from [0 360] to [-180 180] (default false), set to true when using together with cmems [-180, 180] + + +# Defaults + +# Model +K = 500.0 # Eddy diffusion m2/s +leeway_coeff = 0.0144 # 0.016 * 0.9 + + +# write particle tracks to netcdf +write_maps = true # write to netcdf true or false +write_maps_dir = "../workDir/netcdf_output" # relative or absolute folder name +write_maps_filename = "output.nc" # name of file +write_maps_interval = 3600 # write output at tstart:interval:tend in seconds + +# write pictures to disk +plot_maps = false # write plots true or false +plot_maps_folder = "../workDir/images_output" # relative or absolute folder name +plot_maps_interval = 3600 # plot maps at tstart:interval:tend in seconds 
+#plot_background_source = "gebco" # "emodnet-bathymetry", "open-streetmap" or "gebco" +#bbox = [-90.0,20.0,-10.0,50.0] # spatial extent for map plots diff --git a/case_gloparts/workDir/run_julia.bat b/case_gloparts/workDir/run_julia.bat new file mode 100644 index 0000000..50a0112 --- /dev/null +++ b/case_gloparts/workDir/run_julia.bat @@ -0,0 +1,4 @@ +@ECHO OFF +ECHO This is to execute Julia Global Model in GLOSSIS + +julia --project= ../Particles.jl/drifters.jl ../workDir/config.toml > out.txt diff --git a/case_gloparts/workDir/run_julia.sh b/case_gloparts/workDir/run_julia.sh new file mode 100644 index 0000000..5726425 --- /dev/null +++ b/case_gloparts/workDir/run_julia.sh @@ -0,0 +1,3 @@ +#! /bin/bash +export PATH=$PATH:/opt/fews/models/julia-1.7.3/bin +julia --project= ../Particles.jl/drifters.jl ../workDir/config.toml > out.txt diff --git a/drifters.jl b/drifters.jl index db4d059..20002cd 100644 --- a/drifters.jl +++ b/drifters.jl @@ -53,7 +53,7 @@ end # simulation timing d["time_direction"] = :forwards # :forwards or :backwards -if !haskey(d, "start") +if !haskey(d, "tstart") d["tstart"] = 0.0 end if !haskey(d, "tend") @@ -358,45 +358,3 @@ end d["f"] = f! run_simulation(d) - -if d["write_maps"] && haskey(d, "npartpersource") && d["npartpersource"] > 1 - import NetCDF - using NetCDF - import Statistics - using Statistics - nsources = d["nsources"] - npartpersource = d["npartpersource"] - - fullfile = joinpath(d["write_maps_dir"], d["write_maps_filename"]) - file = NetCDF.open(fullfile) - gatts = file.gatts - time = ncread(fullfile, "time") - ntimes = length(time) - time_atts = file["time"].atts - finalize(file) - - fullfile_mean = joinpath(d["write_maps_dir"], "source-averaged_" * d["write_maps_filename"]) - if isfile(fullfile_mean) - println("Source-averaged output file exists. 
Removing file $(fullfile_mean)") - rm(fullfile_mean) - end - nc = NetCDF.create(fullfile_mean, gatts = gatts, mode = NC_NETCDF4) - - for varname in keys(file.vars) - dimnames = [file[varname].dim[i].name for i = 1:file[varname].ndim] - if "time" in dimnames && "particles" in dimnames - data = ncread(fullfile, varname) - nccreate(fullfile_mean, varname, "time", time, "sources", collect(1:1:nsources)) - data_mean = zeros(ntimes, nsources) - for srci = 1:nsources - ind1 = (srci - 1) * npartpersource + 1 - ind2 = srci * npartpersource - data_mean[:,srci] = mean(data[:,ind1:ind2], dims = 2) #todo: take nanmean or skipmissing to avoid mean([1 2 NaN/missing 5]) to become NaN - end - ncwrite(data_mean, fullfile_mean, varname) - ncputatt(fullfile_mean, varname, file[varname].atts) - end - end - ncputatt(fullfile_mean, "time", time_atts) - finalize(fullfile_mean) -end diff --git a/src/cmems_grid.jl b/src/cmems_grid.jl index 04c55c9..c8a7c22 100644 --- a/src/cmems_grid.jl +++ b/src/cmems_grid.jl @@ -14,7 +14,6 @@ using NetCDF using Dates -using Glob debuglevel = 1 #0-nothing, larger more output @@ -28,28 +27,13 @@ mutable struct CmemsData """ Constructor cmems_data = CmemsData(".", "my_cmems_file.nc") - cmems_datas = CmemsData("data/2022", r"07/CMEMS/my_cmems_file_part.+.nc") # using Regex - cmems_datas = CmemsData("data/2022", "*/CMEMS/my_cmems_file_part*.nc") # using Glob (* = wildcard) """ function CmemsData(path, filename; lon = "longitude", lat = "latitude") - map = [] - if isa(filename, Regex) - filenames = filter(x -> ~isnothing(match(filename, x)), readdir(path)) - filenames = [joinpath(path, filename) for filename = filenames] - elseif occursin("*", filename) - filenames = glob(filename, path) - else - filenames = [joinpath(path, filename)] - end - for filename = filenames - file = NetCDF.open(filename) - x = collect(file.vars[lon]) - y = collect(file.vars[lat]) - grid = CartesianXYGrid(x, y, true) - push!(map, new(file, grid)) - end - length(map) != 1 || (map = 
map[1]) # backward compatibility - return map + file = NetCDF.open(joinpath(path, filename)) + x = collect(file.vars[lon]) + y = collect(file.vars[lat]) + grid = CartesianXYGrid(x, y, true) + return new(file, grid) end end @@ -63,28 +47,13 @@ mutable struct GFSData """ Constructor gfs_data = GFSData(".","my_gfs_file.nc") - gfs_datas = GFSData("data/2022", r"07/CMEMS/my_gfs_file_part.+.nc") # using Regex - gfs_datas = GFSData("data/2022", "*/CMEMS/my_gfs_file_part*.nc") # using Glob (* = wildcard) """ function GFSData(path, filename; lon = "x", lat = "y") - map = [] - if isa(filename, Regex) - filenames = filter(x -> ~isnothing(match(filename, x)), readdir(path)) - filenames = [joinpath(path, filename) for filename = filenames] - elseif occursin("*", filename) - filenames = glob(filename, path) - else - filenames = [joinpath(path, filename)] - end - for filename = filenames - file = NetCDF.open(filename) - x = collect(file.vars[lon]) - y = collect(file.vars[lat]) - grid = CartesianXYGrid(x, y, true) - push!(map, new(file, grid)) - end - length(map) != 1 || (map = map[1]) # backward compatibility - return map + file = NetCDF.open(filename) + x = collect(file.vars[lon]) + y = collect(file.vars[lat]) + grid = CartesianXYGrid(x, y, true) + return new(file, grid) end end @@ -193,54 +162,27 @@ end p = initialize_interpolation(cmems,"msl",t0) Create an interpolation function p(x,y,z,t) """ -function initialize_interpolation(data, varname::String, reftime::DateTime, dummy = 0.0, cache_direction::Symbol = :forwards; wrap = false) - !isa(data, CmemsData) || (data = [data]) # to allow for looping over an array of CmemsData - !isa(data, GFSData) || (data = [data]) # to allow for looping over an array of GFSData - - times_per_file = [get_times(data[i], reftime) for i in 1:length(data)] - - function get_xyt(data, data_ind) - times = times_per_file[data_ind] - values = data[data_ind].file.vars[varname] #TODO more checks - missing_value = values.atts["_FillValue"] - if 
haskey(values.atts, "scale_factor") - scaling = values.atts["scale_factor"] - else - scaling = 1.0 - end - if haskey(values.atts, "add_offset") - offset = values.atts["add_offset"] - else - offset = 0.0 - end - xyt = CartesianXYTGrid(data[data_ind].grid, times, values, varname, missing_value, scaling, offset, cache_direction) - return xyt +function initialize_interpolation(data::CmemsData, varname::String, reftime::DateTime, dummy = 0.0, cache_direction::Symbol = :forwards; wrap = false) + times = get_times(data, reftime) + values = data.file.vars[varname] #TODO more checks + missing_value = values.atts["_FillValue"] + if haskey(values.atts, "scale_factor") + scaling = values.atts["scale_factor"] + else + scaling = 1.0 end - - xyts = Array{Any,1}(undef, length(data)) - function update_xyt(x,y,t) - intime = [(t >= times[1]) && (t <= times[end]) for times in times_per_file] - inspace = [in_bbox(data[i].grid,x,y) for i in 1:length(data)] - data_ind_needed = findlast((intime .* inspace)) - if isnothing(data_ind_needed) - data_ind_needed = 1 # just continue with first map and get an error-message during the interpolation - end - if isassigned(xyts, data_ind_needed) - (debuglevel >= 2) && println("data is already cached") - else # load additional xyt - (debuglevel >= 1) && println("t = $(t) - Reading data from file: ../$(joinpath(splitpath(data[data_ind_needed].file.name)[end-2:end]...))") - xyts[data_ind_needed] = get_xyt(data, data_ind_needed) - end - xyt = xyts[data_ind_needed] - return xyt + if haskey(values.atts, "add_offset") + offset = values.atts["add_offset"] + else + offset = 0.0 end - + xyt = CartesianXYTGrid(data.grid, times, values, varname, missing_value, scaling, offset, cache_direction) + function f(x, y, z, t) # GFS Grid is 0:360 instead of -180:180 if wrap && (x < 0) x += 360 end - xyt = update_xyt(x,y,t) value = interpolate(xyt, x, y, t, dummy) return value end diff --git a/src/dflow.jl b/src/dflow.jl index d1923d1..e297477 100644 --- a/src/dflow.jl 
+++ b/src/dflow.jl @@ -3,7 +3,6 @@ using NetCDF using Dates -using Glob # include("unstructured_grid.jl") debuglevel = 1 # 0-nothing, larger more output @@ -46,14 +45,12 @@ function load_nc_info(path, filename) if isa(filename, Regex) # filename used to be a Regex-type (filename_regex::Regex) filenames = filter(x -> ~isnothing(match(filename, x)), readdir(path)) filenames = [joinpath(path, filename) for filename = filenames] - elseif occursin("*", filename) - filenames = glob(filename, path) else filenames = [joinpath(path, filename)] end for filename = filenames - @info filename - push!(map, NetCDF.open(filename)) + @info filename + push!(map, NetCDF.open(filename)) end return map end @@ -67,21 +64,11 @@ load_nc_info. function load_dflow_grid(map, nmin=50, spherical=true) interp = Interpolator() println("compute index:") - map_names = [] for i = 1:length(map) - # only load the grid with a certain filename once - map_name = splitpath(map[i].name)[end] - if in(map_name, map_names) - break - else - push!(map_names, map_name) - end if haskey(map[i].vars, "NetElemNode") # Old FM variable name edges_temp = map[i].vars["NetElemNode"][:,:] elseif haskey(map[i].vars, "mesh2d_face_nodes") # New FM variable name edges_temp = map[i].vars["mesh2d_face_nodes"][:,:] - elseif haskey(map[i].vars, "Mesh_face_nodes") # FEWS variable name - edges_temp = map[i].vars["Mesh_face_nodes"][:,:] else error("Variable 'mesh2d_face_nodes' (or similar) is missing in D-Flow FM file") end @@ -91,9 +78,6 @@ function load_dflow_grid(map, nmin=50, spherical=true) elseif haskey(map[i].vars, "mesh2d_node_x") # New FM variable names xnodes_temp = map[i].vars["mesh2d_node_x"][:] ynodes_temp = map[i].vars["mesh2d_node_y"][:] - elseif haskey(map[i].vars, "Mesh_node_x") # FEWS variable name - xnodes_temp = map[i].vars["Mesh_node_x"][:] - ynodes_temp = map[i].vars["Mesh_node_y"][:] else error("Variable 'mesh2d_node_x' (or similar) is missing in D-Flow FM file") end @@ -240,11 +224,7 @@ function 
get_times(map, reftime::DateTime) time_relative = map[1].vars["time"] units = time_relative.atts["units"] temp = split(units, "since") - if endswith(temp[2],".0 +0000") - t0 = DateTime(strip(temp[2]), "yyyy-mm-dd HH:MM:SS.0 +0000") # format used in FEWS - else - t0 = DateTime(strip(temp[2]), "yyyy-mm-dd HH:MM:SS +00:00") - end + t0 = DateTime(strip(temp[2]), "yyyy-mm-dd HH:MM:SS") dt_seconds = 1.0 if startswith(temp[1], "seconds") dt_seconds = 1.0 @@ -291,11 +271,7 @@ function get_reftime(map) time_relative = map[1].vars["time"] units = time_relative.atts["units"] temp = split(units, "since") - if endswith(temp[2],"0 +00:00") - t0 = DateTime(strip(temp[2]), "yyyy-mm-dd HH:MM:SS +00:00") - else - t0 = DateTime(strip(temp[2]), "yyyy-mm-dd HH:MM:SS") - end + t0 = DateTime(strip(temp[2]), "yyyy-mm-dd HH:MM:SS") dt_seconds = 1.0 if startswith(temp[1], "seconds") dt_seconds = 1.0 @@ -337,18 +313,8 @@ function initialize_interpolation(dflow_map, interp::Interpolator, varname, reft # dumval_cache=dumval # reftime_cache=reftime times_cache = get_times(dflow_map, reftime) - no_domains = length(interp.grids) - multiple_runs = length(dflow_map) > no_domains # multiple runs per domain available - if multiple_runs - times_cache_per_run = [get_times([dflow_map[i]], reftime) for i in 1:no_domains:length(dflow_map)] - dflow_map_all = dflow_map - dflow_map_ind = 1:no_domains - dflow_map = dflow_map[dflow_map_ind] - end - # keep 3 times in memory time_cache = zeros(3) - time_cache_all_domains = zeros(3,no_domains) var_cache = Array{Any,1}(undef, 3) initialized = false if !haskey(dflow_map[1].vars, varname) @@ -360,40 +326,17 @@ function initialize_interpolation(dflow_map, interp::Interpolator, varname, reft throw(ArgumentError("$varname is not a variable in the map")) end end - time_cache_all_domains = repeat(times_cache[1:3], 1, no_domains) - time_cache_index_all_domains = repeat([3], no_domains) # index of last cached field in list of all available times for ti = 1:3 + 
time_cache[ti] = times_cache[ti] var_cache[ti] = load_nc_map_slice(dflow_map, varname, ti) end - (debuglevel > 4) && println("Initial cache index=$(time_cache_index_all_domains[1]) ") + time_cache_index = 3 # index of last cached field in list of all available times + (debuglevel > 4) && println("Initial cache index=$(time_cache_index) ") """ update_cache_forwards(t) Refresh the cached fields in the forwards time directions if needed. """ - function update_cache_forwards(t, domainnr) - time_cache = time_cache_all_domains[:, domainnr] - time_cache_index = time_cache_index_all_domains[domainnr] - - # First, update dflow_map and refresh cache - if multiple_runs - run_ind_needed = findlast([(t >= times[1]) && (t <= times[end]) for times in times_cache_per_run]) - if !isnothing(run_ind_needed) - dflow_map_ind_needed = ((run_ind_needed-1)*no_domains+1) : (run_ind_needed*no_domains) - if dflow_map_ind_needed != dflow_map_ind # update of dflow_map needed (e.g. using run02/run_0000_map.nc instead of run01/run_0000_map.nc) - (debuglevel >= 1) && println("t = $(t) - Update '$(varname)'-data using: ../$(joinpath(splitpath(dflow_map_all[dflow_map_ind_needed[domainnr]].name)[end-2:end]...))") - dflow_map_ind = dflow_map_ind_needed - dflow_map = dflow_map_all[dflow_map_ind] - times_cache = times_cache_per_run[run_ind_needed] - for ti = 1:3 - time_cache[ti] = times_cache[ti] - var_cache[ti][domainnr] = load_nc_map_slice(dflow_map, varname, ti, -1, domainnr)[1] - end - time_cache_index = 3 # index of last cached field in list of all available times - end - end - end - - # Next, update cache of this dflow_map + function update_cache_forwards(t) if (t >= time_cache[1]) && (t <= time_cache[3]) (debuglevel >= 2) && println("cache is okay") elseif t > times_cache[end] @@ -403,31 +346,26 @@ function initialize_interpolation(dflow_map, interp::Interpolator, varname, reft time_cache[1] = time_cache[2] time_cache[2] = time_cache[3] time_cache[3] = times_cache[time_cache_index + 1] - 
var_cache[1][domainnr] = var_cache[2][domainnr] - var_cache[2][domainnr] = var_cache[3][domainnr] - var_cache[3][domainnr] = load_nc_map_slice(dflow_map, varname, time_cache_index + 1, -1, domainnr)[1] + var_cache[1] = var_cache[2] + var_cache[2] = var_cache[3] + var_cache[3] = load_nc_map_slice(dflow_map, varname, time_cache_index + 1) time_cache_index += 1 elseif t < times_cache[1] error("Trying to access before first map t=$(t) < $(times_cache[1])") else # complete refresh of cache (debuglevel >= 2) && println("refresh cache") ti = findfirst(tt -> tt > t, times_cache) - (ti != length(times_cache)) || (ti = ti - 1) - (ti != 1) || (ti = ti + 1) (debuglevel >= 4) && println("ti=$(ti), t=$(t)") (debuglevel >= 4) && println("$(times_cache)") time_cache[1] = times_cache[ti - 1] time_cache[2] = times_cache[ti] time_cache[3] = times_cache[ti + 1] - var_cache[1][domainnr] = load_nc_map_slice(dflow_map, varname, ti - 1, -1, domainnr)[1] - var_cache[2][domainnr] = load_nc_map_slice(dflow_map, varname, ti , -1, domainnr)[1] - var_cache[3][domainnr] = load_nc_map_slice(dflow_map, varname, ti + 1, -1, domainnr)[1] + var_cache[1] = load_nc_map_slice(dflow_map, varname, ti - 1) + var_cache[2] = load_nc_map_slice(dflow_map, varname, ti) + var_cache[3] = load_nc_map_slice(dflow_map, varname, ti + 1) time_cache_index = ti + 1 end (debuglevel >= 4) && println("$(time_cache_index) $(time_cache[1]) $(time_cache[2]) $(time_cache[3]) ") - - time_cache_all_domains[:, domainnr] = time_cache - time_cache_index_all_domains[domainnr] = time_cache_index end """ @@ -487,9 +425,8 @@ function initialize_interpolation(dflow_map, interp::Interpolator, varname, reft # flow in x direction (for now has to be called u) function f(x, y, z, t) ind = find_index(interp, x, y) - if ind[1] == -1; return dumval; end if cache_direction == :forwards - update_cache_forwards(t, ind[1]) + update_cache_forwards(t) elseif cache_direction == :backwards update_cache_backwards(t) end diff --git 
a/test/test_cmems_grid.jl b/test/test_cmems_grid.jl index 79f5850..2598a7d 100644 --- a/test/test_cmems_grid.jl +++ b/test/test_cmems_grid.jl @@ -63,34 +63,5 @@ function test2() @test u2≈0.037575478067336805 end -function test3() - #init - cmems_datas = CmemsData("../test_data", r"cmems_map_u_.+_time..nc"; lon = "x", lat = "y") # Regex - @test length(cmems_datas) == 2 - cmems_datas = CmemsData("../test_data", "cmems_map_u_*_time*.nc"; lon = "x", lat = "y") # Glob - @test length(cmems_datas) == 2 - cmems_data = cmems_datas[1] - - t0 = get_reftime(cmems_data) - @test t0 == DateTime(2022,05,20) - - times = get_times(cmems_data,t0) - @test times[1] ≈ 0.0 - @test times[2] ≈ 3600.0 - - t2 = as_DateTime(cmems_data, t0, times[2]) - @test t2 == DateTime("2022-05-20T01:00:00") - - - u = initialize_interpolation(cmems_datas, "uo", t0, NaN) #water velocity x-dir - u1 = u(0.5, 52, 0.0, 1800.0) # only available in 'time1'-file - @test u1 ≈ -1 - u1 = u(0.5, 52, 0.0, 21600.0) # available in both 'time1'- and 'time2'-file (so pick 'time2'-file) - @test u1 ≈ 1 - u1 = u(0.5, 52, 0.0, 90000.0) # only available in 'time2'-file - @test u1 ≈ 1 -end - test1() test2() -test3() diff --git a/test/test_dflow.jl b/test/test_dflow.jl index f7907b4..0df994d 100644 --- a/test/test_dflow.jl +++ b/test/test_dflow.jl @@ -83,34 +83,6 @@ function test2() end function test3() - #init - dflow_map=load_nc_info("../test_data","dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000_map.nc") - @test length(dflow_map)==1 - dflow_map=load_nc_info("../test_data","dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/cb_2d_0000_map.nc") - @test length(dflow_map)==1 - dflow_map=load_nc_info("../test_data/dfm_multiple_runs_and_domains/01_flow_to_right/DFM_OUTPUT_cb_2d/",r"cb_2d_...._map.nc") - @test length(dflow_map)==2 - dflow_map=load_nc_info("../test_data","dfm_multiple_runs_and_domains/*/DFM_OUTPUT_cb_2d/cb_2d_*_map.nc") - @test length(dflow_map)==4 - @test 
size(dflow_map[1].vars["mesh2d_ucx"])==(317,25) - interp=load_dflow_grid(dflow_map,50,true) - @test length(interp.grids)==2 - - # u,v interpolation functions - t0=get_reftime(dflow_map) - @test t0==DateTime(2001,1,1) - u1,v1=initialize_interpolation(dflow_map,interp,t0) - u_value=u1(3.33, 47.99, 0.0, 1800.0) - @test u_value==0.0435357731534879 - u_value=u1(3.33, 47.99, 0.0, 86399.0) - @test u_value==-0.048490442244461425 - u_value=u1(3.33, 47.99, 0.0, 43200.0) - @test u_value==0 - u_value=u1(3.33, 47.99, 0.0, 43199.0) - @test u_value==0.04905390667374792 -end - -function test4() # open file for 3d lock exchange dflow_map=load_nc_info("../test_data","locxxz_fullgrid_map.nc") @test length(dflow_map)==1 @@ -158,5 +130,3 @@ if isfile(joinpath("../test_data","DCSM-FM_0_5nm_0000_map.nc")) end test3() - -test4()