Ant1432/feature #250

Merged · 15 commits · Jul 12, 2024
2 changes: 2 additions & 0 deletions NEWS.md
@@ -17,6 +17,8 @@ It contains a table with group dimensions of time series for binding constraints
BREAKING CHANGES :

* `readInputThermal()` / `readInputRES()` now return a default value when the selected clusters contain no time series.
* `readClusterDesc()` / `readClusterResDesc()` / `readClusterSTDesc()` are updated to use the new "table mode" API endpoint.
  - In "text" mode, the functions return all properties (missing properties are filled with their default values) according to the study version.

BUGFIXES :

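A minimal usage sketch of the behaviour summarised above, assuming an Antares study at a hypothetical local path and the usual setSimulationPath() workflow (the exact columns returned depend on the study version):

# not run: "path/to/study" is a placeholder
library(antaresRead)
opts <- setSimulationPath("path/to/study", simulation = "input")

# "text" mode: properties missing from list.ini are filled with the
# default values defined for the study version
thermal <- readClusterDesc(opts)

# renewable and short-term storage clusters follow the same logic
res_clusters <- readClusterResDesc(opts)
sts_clusters <- readClusterSTDesc(opts)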
26 changes: 14 additions & 12 deletions R/importOutput.R
@@ -377,14 +377,16 @@

# Get cluster capacity and must run mode
clusterDesc <- readClusterDesc(opts)
if(is.null(clusterDesc$must.run)) clusterDesc$must.run <- FALSE
clusterDesc[is.na(must.run), must.run := FALSE]
if (is.null(clusterDesc$min.stable.power)) clusterDesc$min.stable.power <- 0
clusterDesc[is.na(min.stable.power), min.stable.power := 0]
if(is.null(clusterDesc[["must-run"]]))
clusterDesc[["must-run"]] <- FALSE
clusterDesc[is.na(`must-run`), `must-run` := FALSE]
if (is.null(clusterDesc[["min-stable-power"]]))
clusterDesc[["min-stable-power"]] <- 0
clusterDesc[is.na(`min-stable-power`), `min-stable-power` := 0]
clusterDesc <- clusterDesc[, .(area, cluster,
capacity = nominalcapacity * unitcount,
min.stable.power,
must.run)]
`min-stable-power`,
`must-run`)]

# Are clusters in partial must run mode ?
mod <- llply(areas, .importThermalModulation, opts = opts, timeStep = "hourly")
@@ -449,16 +451,16 @@

}

.mergeByRef(res, clusterDesc[,.(area, cluster, must.run, min.stable.power)])
.mergeByRef(res, clusterDesc[,.(area, cluster, `must-run`, `min-stable-power`)])

if (is.null(res$NODU)) res[, thermalPmin := 0]
else res[, thermalPmin := min.stable.power * NODU]
else res[, thermalPmin := `min-stable-power` * NODU]

res[, `:=`(
mustRun = production * must.run,
mustRunTotal = production * must.run + mustRunPartial,
must.run = NULL,
min.stable.power = NULL
mustRun = production * `must-run`,
mustRunTotal = production * `must-run` + mustRunPartial,
`must-run` = NULL,
`min-stable-power` = NULL
)]

res[, thermalPmin := pmax(thermalPmin, mustRunTotal)]
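Because the cluster-description columns are now hyphenated (must-run, min-stable-power), data.table expressions have to quote them with backticks. A small self-contained sketch of the pattern used in the hunk above, on toy data (all values are illustrative):

library(data.table)

clusterDesc <- data.table(
  area = c("a", "a"),
  cluster = c("base", "peak"),
  `must-run` = c(TRUE, NA),
  `min-stable-power` = c(100, NA)
)

# missing values fall back to the same defaults as in importOutput.R
clusterDesc[is.na(`must-run`), `must-run` := FALSE]
clusterDesc[is.na(`min-stable-power`), `min-stable-power` := 0]

clusterDesc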
2 changes: 1 addition & 1 deletion R/readAntares.R
@@ -123,7 +123,7 @@
#' similar to mustRunTotal except it also takes into account the production
#' induced by the minimum stable power of the units of a cluster. More
#' precisely, for a given cluster and a given time step, it is equal to
#' \code{min(NODU x min.stable.power, mustRunTotal)}.
#' \code{min(NODU x min-stable-power, mustRunTotal)}.
#' @param select
#' Character vector containing the name of the columns to import. If this
#' argument is \code{NULL}, all variables are imported. Special names
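Only the column name inside the documented formula changes here. As a toy numeric illustration of that documented relation (all values are made up):

NODU <- 3                # number of dispatched units
min_stable_power <- 120  # stands in for the `min-stable-power` column
mustRunTotal <- 500

min(NODU * min_stable_power, mustRunTotal)
# [1] 360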
190 changes: 118 additions & 72 deletions R/readClusterDesc.R
@@ -34,6 +34,8 @@
#' \code{readClusterResDesc} : read renewable clusters (Antares >= V8.1)
#'
#' \code{readClusterSTDesc} : read st-storage clusters (Antares >= V8.6)
#'
#' If there are no cluster properties, a `Null data.table (0 rows and 0 cols)` is returned.
#'
#' @examples
#'
@@ -92,90 +94,134 @@ readClusterSTDesc <- function(opts = simOptions()) {
.readClusterDesc <- function(opts = simOptions(),
dir = "thermal/clusters") {

if(isH5Opts(opts)){
if(dir %in% "thermal/clusters"){
if(.requireRhdf5_Antares(stopP = FALSE)){
return(h5ReadClusterDesc(opts))
} else {
stop(rhdf5_message, call. = FALSE)
}
} else {
stop("Read cluster Description from '", dir, "' not available using .h5", call. = FALSE)
}
}

path <- file.path(opts$inputPath, dir)

columns <- .generate_columns_by_type(dir = dir)
api_study <- is_api_study(opts)

table_type <- switch(
dir,
"thermal/clusters" = "thermals",
"renewables/clusters" = "renewables",
"st-storage/clusters" = "st-storages"
)

if(api_study){

jsoncld <- read_secure_json(paste0(path, "&depth=4"), token = opts$token, timeout = opts$timeout, config = opts$httr_config)
res <- rbindlist(mapply(function(X1, Y1){
clusters <- rbindlist(
mapply(function(X, Y){
out <- as.data.frame(X)
if(nrow(out) == 0)return(NULL)
out$area = Y
out
}, X1$list, names(X1$list), SIMPLIFY = FALSE), fill = TRUE)
if(is.null(clusters))return(NULL)
if(nrow(clusters)==0)return(NULL)
clusters$area <- Y1
clusters[, .SD, .SDcols = order(names(clusters))]
},jsoncld, names(jsoncld), SIMPLIFY = FALSE), fill = TRUE)


}else{

areas <- list.files(path)

res <- ldply(areas, function(x) {
clusters <- readIniFile(file.path(path, x, "list.ini"))

if (length(clusters) == 0) return(NULL)

clusters <- ldply(clusters, as.data.frame)
clusters$.id <- NULL
clusters$area <- x

clusters[, c(ncol(clusters), 1:(ncol(clusters) - 1))]
})
# api request with all columns
list_clusters = api_get(
opts = opts,
endpoint = paste0(opts$study_id, "/table-mode/", table_type),
query = list(
columns = ""
)
)

return(list_clusters)
}

# "text" mode
areas <- list.files(path)

if(length(res) == 0){
mandatory_cols <- c("area","cluster")
warning("No cluster description available.", call. = FALSE)
res <- setNames(data.table(matrix(nrow = 0, ncol = length(mandatory_cols) + length(columns))), c(mandatory_cols, columns))
}else{
if(api_study){
mandatory_cols <- c("area", "name", "group")
additional_cols <- setdiff(colnames(res),mandatory_cols)
res <- res[, .SD, .SDcols = c(mandatory_cols, additional_cols)]
}
res <- as.data.table(res)
setnames(res, "name", "cluster")
res$cluster <- as.factor(tolower(res$cluster))
}
# READ cluster properties
properties <- get_input_cluster_properties(table_type = table_type,
opts = opts)

# read properties for each area
res <- plyr::llply(areas, function(x, prop_ref=properties) {
clusters <- readIniFile(file.path(path, x, "list.ini"))
if (length(clusters) == 0)
return(NULL)
# convert the list to a data.frame
clusters <- plyr::ldply(clusters, function(x){
df_clust <- data.frame(x, check.names = FALSE)
colnames_to_add <- setdiff(names(prop_ref), names(df_clust))
if(!identical(colnames_to_add, character(0)))
df_clust <- cbind(df_clust, prop_ref[, .SD, .SDcols = colnames_to_add])
df_clust
}) # check.names = FALSE (too many side effects)
clusters$.id <- NULL
clusters$area <- x
# reorder columns
clusters[, c("area", setdiff(colnames(clusters), "area"))]
})

res <- data.table::rbindlist(l = res, fill = TRUE)

# NO CLUSTER PROPERTIES FOUND
if(length(res) == 0)
return(data.table())

# output format conversion
res <- data.table::as.data.table(res)
data.table::setnames(res, "name", "cluster")
res$cluster <- as.factor(tolower(res$cluster))
res
}
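The rewritten "text" branch fills every property missing from an area's list.ini with the defaults returned by get_input_cluster_properties(). A condensed, self-contained sketch of that filling step, using a toy reference table and a toy cluster (real property names and defaults come from pkgEnv[["inputProperties"]]):

library(data.table)

# toy stand-in for the defaults returned by get_input_cluster_properties()
prop_ref <- data.table(enabled = TRUE, unitcount = 1, nominalcapacity = 0)

# one cluster as read from list.ini (only a subset of properties is present)
df_clust <- data.frame(name = "base", nominalcapacity = 900, check.names = FALSE)

# append the missing properties with their default values
colnames_to_add <- setdiff(names(prop_ref), names(df_clust))
if (!identical(colnames_to_add, character(0)))
  df_clust <- cbind(df_clust, prop_ref[, .SD, .SDcols = colnames_to_add])

df_clust
#   name nominalcapacity enabled unitcount
# 1 base             900    TRUE         1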

.generate_columns_by_type <- function(dir = c("thermal/clusters", "renewables/clusters", "st-storage/clusters")) {


# read and manage the reference table of cluster properties
# return the reference filtered by cluster type and study version
get_input_cluster_properties <- function(table_type, opts){
# READ cluster properties
full_ref_properties <- pkgEnv[["inputProperties"]]

columns <- switch(
dir,
"thermal/clusters" = c("group","enabled","must_run","unit_count","nominal_capacity",
"min_stable_power","spinning","min_up_time","min_down_time",
"co2","marginal_cost","fixed_cost","startup_cost","market_bid_cost",
"spread_cost","ts_gen","volatility_forced","volatility_planned",
"law_forced","law_planned"),

"renewables/clusters" = c("group","ts_interpretation","enabled","unit_count","nominal_capacity")
#"st-storage/clusters" = # awaiting API-side development
category_ref_cluster <- switch(
table_type,
"thermals" = "thermal",
"renewables" = "renewable",
"st-storages" = "storage"
)
return(columns)

# filter by category
ref_filter_by_cat <- full_ref_properties[`Category` %in%
category_ref_cluster]
# filter by study version
ref_filter_by_vers <- ref_filter_by_cat[`Version Antares` <=
opts$antaresVersion |
`Version Antares` %in% NA]

# detect parameters whose default value changed across study versions
# and keep the most recent value applicable to the study version
df_multi_params <- ref_filter_by_vers[,
count := .N,
by = c("INI Name"),
keyby = TRUE][
count>1][,
.SD[which.max(`Version Antares`)],
by="INI Name"]

df_unique_params <- ref_filter_by_vers[,
count := .N,
by = c("INI Name"),
keyby = TRUE][
count==1]

ref_filter_by_vers <- rbind(df_unique_params, df_multi_params)

# select key columns and reshape to wide format
ref_filter_by_vers <- ref_filter_by_vers[ ,
.SD,
.SDcols = c("INI Name",
"Default",
"Type")]

# select the column names to convert to logical and numeric
logical_col_names <- ref_filter_by_vers[Type%in%"bool"][["INI Name"]]
numerical_col_names <- ref_filter_by_vers[Type%in%c("int", "float")][["INI Name"]]

wide_ref <- data.table::dcast(data = ref_filter_by_vers,
formula = .~`INI Name`,
value.var = "Default")[
,
.SD,
.SDcols = -c(".", "name")]
# /!\ column type conversion on logical and numeric columns
wide_ref[,
(logical_col_names):= lapply(.SD, as.logical),
.SDcols = logical_col_names][
,
(numerical_col_names):= lapply(.SD, as.numeric),
.SDcols = numerical_col_names
]

return(wide_ref)
}
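The dcast() call above pivots the one-row-per-property reference into a single-row table of defaults, after which the columns tagged "bool", "int" or "float" are coerced from character to their proper types. A reduced sketch of that reshaping, with made-up property rows:

library(data.table)

ref <- data.table(
  `INI Name` = c("enabled", "unitcount", "nominalcapacity"),
  Default = c("true", "1", "0"),
  Type = c("bool", "int", "float")
)

logical_col_names <- ref[Type %in% "bool"][["INI Name"]]
numerical_col_names <- ref[Type %in% c("int", "float")][["INI Name"]]

wide_ref <- data.table::dcast(ref, . ~ `INI Name`, value.var = "Default")[, .SD, .SDcols = -c(".")]
wide_ref[, (logical_col_names) := lapply(.SD, as.logical), .SDcols = logical_col_names]
wide_ref[, (numerical_col_names) := lapply(.SD, as.numeric), .SDcols = numerical_col_names]

wide_ref
#    enabled nominalcapacity unitcount
# 1:    TRUE               0         1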
1 change: 1 addition & 0 deletions R/utils.R
@@ -1,3 +1,4 @@
# badge doc ----
badge_api_ok <- function() {
"\\ifelse{html}{\\figure{badge_api_ok.svg}{options: alt='Antares API OK'}}{Antares API: \\strong{OK}}"
}