updated to fix a bug associated with longer time for sim, added other results to nodal hosting capacity analysis
EC2 Default User committed Mar 14, 2024
1 parent aeb6995 commit a60038f
Showing 26 changed files with 296 additions and 146 deletions.
4 changes: 1 addition & 3 deletions emerge/cli/cli.py
@@ -1,6 +1,4 @@
"""
Command line utility for EMeRGE
"""
"""Command line utility for EMeRGE."""

from pathlib import Path

19 changes: 0 additions & 19 deletions emerge/cli/create_sqlite_table.py

This file was deleted.

1 change: 0 additions & 1 deletion emerge/cli/multiscenario_metrics.py
@@ -1,5 +1,4 @@
""" Module for computing time series metrics for multi scenario simulation."""
import os
from pathlib import Path
import multiprocessing
from typing import Annotated
225 changes: 185 additions & 40 deletions emerge/cli/nodal_hosting_capacity.py
@@ -9,10 +9,10 @@
from datetime import datetime
import json
import multiprocessing
import time
import math

import click
from emerge.metrics.system_metrics import SARDI_aggregated
from emerge.simulator.simulation_manager import OpenDSSSimulationManager
import numpy as np
import polars as pl
from pydantic import BaseModel, Field
@@ -22,7 +22,26 @@
from emerge.cli import get_num_core
from emerge.metrics import observer
from emerge.simulator import opendss
from emerge.cli.create_sqlite_table import create_table, HostingCapacityResult
from emerge.cli.nodal_hosting_sqlite_tables import (
HostingCapacityReport,
OverloadedLinesReport,
OverloadedTransformersReport,
SimulationConvergenceReport,
TotalEnergyReport,
create_table,
get_engine
)
from emerge.metrics.line_loading_stats import OverloadedLines
from emerge.metrics.system_metrics import (
SARDI_aggregated,
SARDI_line,
SARDI_transformer,
SARDI_voltage,
TotalEnergy,
TotalPVGeneration
)
from emerge.metrics.xfmr_loading_stats import OverloadedTransformers
from emerge.simulator.simulation_manager import OpenDSSSimulationManager

class BasicSimulationSettings(BaseModel):
"""Interface for basic simulation settings."""
Expand All @@ -47,53 +66,173 @@ class MultiNodeHostingCapacityInput(SingleNodeHostingCapacityInput):
export_sqlite_path: Annotated[Path, Field(..., description="Sqlite file to export nodal hosting capacity.")]
pv_profile: Annotated[str, Field(..., description="Name of the profile for pv system")]


class NodalHostingCapacityReport:
""" Class interface for capturing nodal hosting capacity results. """
def __init__(self):

self.subject = observer.MetricsSubject()
self.sardi_agg = SARDI_aggregated()
self.sardi_voltage = SARDI_voltage()
self.sardi_line = SARDI_line()
self.sardi_xfmr = SARDI_transformer()
self.solar_generation = TotalPVGeneration()
self.ol_lines = OverloadedLines()
self.ol_xmfrs = OverloadedTransformers()
self.circuit_energy = TotalEnergy()
self.export_energy = TotalEnergy(export_only=True)

for obs in [self.sardi_agg, self.sardi_voltage, self.sardi_line, self.sardi_xfmr,
self.solar_generation, self.ol_lines, self.ol_xmfrs, self.circuit_energy, self.export_energy]:
self.subject.attach(obs)

def is_hosting_capacity_reached(self) -> bool:
""" Method to check if hosting capacity is reached."""
return self.get_sardi_aggregated() > 0 or self.get_total_export_energy() != 0

def get_subject(self) -> observer.MetricsSubject:
""" Return subject container containing observers."""
return self.subject

def get_overloaded_xfmr_loadings(self) -> dict[str, list[float]]:
""" Return overloaded transformers."""
return self.ol_xmfrs.get_metric()

def get_overloaded_line_loadings(self) -> dict[str, list[float]]:
"""Returns overloaded lines."""
return self.ol_lines.get_metric()

def get_sardi_voltage(self):
""" Returns SARDI voltage metrics."""
return pl.from_dict(self.sardi_voltage.get_metric())['sardi_voltage'].to_list()[0]

def get_sardi_aggregated(self):
"""Method to return SARDI aggregate metric."""
return pl.from_dict(self.sardi_agg.get_metric())['sardi_aggregated'].to_list()[0]

def get_sardi_line(self):
""" Method to return SARDI line metric. """
return pl.from_dict(self.sardi_line.get_metric())['sardi_line'].to_list()[0]

def get_sardi_transformer(self):
""" Method to return SARDI transformer metric. """
return pl.from_dict(self.sardi_xfmr.get_metric())['sardi_transformer'].to_list()[0]

def get_solar_total_energy(self):
""" Method to return total solar energy. """
return pl.from_dict(self.solar_generation.get_metric())['active_power'].to_list()[0]

def get_circuit_total_energy(self):
""" Method to get circuit total energy."""
return pl.from_dict(self.circuit_energy.get_metric())['active_power'].to_list()[0]

def get_total_export_energy(self):
"""Method to get total export energy."""
return pl.from_dict(self.export_energy.get_metric())['active_power'].to_list()[0]

def _compute_hosting_capacity(input):
""" Wrapper around compute hosting capacity. """
return compute_hosting_capacity(*input)

def compute_hosting_capacity(config: SingleNodeHostingCapacityInput, bus: str, pv_profile: str) -> tuple[float, str]:
def compute_hosting_capacity(config: SingleNodeHostingCapacityInput,
bus: str,
pv_profile: str,
sqlite_file: Path
):
""" Function to compute node hosting capacity."""
hosting_capacity = 0
opendss_instance = opendss.OpenDSSSimulator(config.master_dss_file)
opendss_instance.dss_instance.Circuit.SetActiveBus(bus)
bus_kv = opendss_instance.dss_instance.Bus.kVBase()
pv_name = f"{bus}_pv"
engine = get_engine(sqlite_file)

new_pv = f"new PVSystem.{pv_name} bus1={bus} "
f"kv={round(bus_kv,2 )} phases=3 kVA=1000 Pmpp=1000 "
f"PF=1.0 yearly={pv_profile}"
for capacity in np.arange(config.step_kw, config.max_kw, config.step_kw):

opendss_instance = opendss.OpenDSSSimulator(config.master_dss_file)
opendss_instance.dss_instance.Circuit.SetActiveBus(bus)
bus_kv = round(opendss_instance.dss_instance.Bus.kVBase()*math.sqrt(3), 2)
pv_name = f"{bus}_pv"

new_pv = f"new PVSystem.{pv_name} bus1={bus} kv={round(bus_kv,2 )} " + \
f"phases=3 kVA={capacity} Pmpp={capacity} PF=1.0 yearly={pv_profile}"
logger.info(new_pv)

opendss_instance.dss_instance.run_command(new_pv)
opendss_instance.dss_instance.run_command(new_pv)
opendss_instance.recalc()
opendss_instance.solve()

for capacity in np.arange(config.step_kw, config.max_kw, config.step_kw):
opendss_instance.dss_instance.run_command(
f"pvsystem.{pv_name}.kva={capacity}")
opendss_instance.dss_instance.run_command(
f"pvsystem.{pv_name}.pmpp={capacity}")

sim_manager = OpenDSSSimulationManager(
opendss_instance=opendss_instance,
simulation_start_time=config.start_time,
profile_start_time=config.profile_start_time,
simulation_end_time=config.end_time,
simulation_timestep_min=config.resolution_min
)

report_instance = NodalHostingCapacityReport()
logger.info(f"Node started {bus}, capacity {capacity}")

start_time = time.time()
sim_manager.simulate(subject=report_instance.get_subject())
end_time = time.time()
logger.info(f"Node finished {bus}, "
f"elpased time {end_time - start_time} seconds")


with Session(engine) as session:
for timestamp, conv_ in zip(sim_manager.convergence_dict['datetime'],
sim_manager.convergence_dict['convergence']):
session.add(SimulationConvergenceReport(
node_name=bus,
convergence=conv_,
capacity_kw=capacity,
timestamp=timestamp
))

session.add(TotalEnergyReport(
node_name=bus,
pv_capacity_kw=capacity,
pv_energy_mwh=report_instance.get_solar_total_energy(),
circuit_energy_mwh=report_instance.get_circuit_total_energy()
))

subject = observer.MetricsSubject()
sardi_observer = SARDI_aggregated()
subject.attach(sardi_observer)
for ol_line, loadings in report_instance.get_overloaded_line_loadings().items():
session.add(
OverloadedLinesReport(
start_time=config.start_time,
resolution_min=config.resolution_min,
node_name=bus,
line_name=ol_line,
loadings=str(loadings)
)
)

sim_manager.simulate(subject=subject)
logger.info(f"Bus finished {bus}, capacity {capacity}")
sardi_aggregated = pl.from_dict(
sardi_observer.get_metric()
)['sardi_aggregated'].to_list()[0]
for ol_xfmr, loadings in report_instance.get_overloaded_xfmr_loadings().items():
session.add(
OverloadedTransformersReport(
start_time=config.start_time,
resolution_min=config.resolution_min,
node_name=bus,
xfmr_name=ol_xfmr,
loadings=str(loadings)
)
)
session.commit()

if sardi_aggregated > 0:
if report_instance.is_hosting_capacity_reached():
break

hosting_capacity = capacity


return hosting_capacity, bus
with Session(engine) as session:
session.add(HostingCapacityReport(
node_name=bus,
hosting_capacity_kw=hosting_capacity,
sardi_voltage=report_instance.get_sardi_voltage(),
sardi_aggregated=report_instance.get_sardi_aggregated(),
sardi_line=report_instance.get_sardi_line(),
sardi_transformer=report_instance.get_sardi_transformer()
))
session.commit()



@click.command()
@@ -102,8 +241,15 @@ def compute_hosting_capacity(config: SingleNodeHostingCapacityInput, bus: str, p
"--config",
help="Path to config file for running timeseries simulation",
)
@click.option(
"-n",
"--nodes",
default='',
show_default=True,
help="List of comma separated nodes",
)
def nodal_hosting_analysis(
config: str
config: str, nodes: str
):
"""Run multiscenario time series simulation and compute
time series metrics."""
@@ -113,22 +259,21 @@ def nodal_hosting_analysis(

config: MultiNodeHostingCapacityInput = MultiNodeHostingCapacityInput.model_validate(config_dict)

opendss_instance = opendss.OpenDSSSimulator(config.master_dss_file)
buses = opendss_instance.dss_instance.Circuit.AllBusNames()
if not nodes:
opendss_instance = opendss.OpenDSSSimulator(config.master_dss_file)
buses = opendss_instance.dss_instance.Circuit.AllBusNames()
else:
buses = nodes.split(',')

engine = create_table(config.export_sqlite_path)
create_table(config.export_sqlite_path)

num_core = get_num_core.get_num_core(config.num_core, len(buses))
with multiprocessing.Pool(int(num_core)) as pool:
data_to_process = [
[SingleNodeHostingCapacityInput.model_validate(config.model_dump()),
bus,
config.pv_profile] for bus in buses
config.pv_profile,
config.export_sqlite_path
] for bus in buses
]
async_results = [pool.apply_async(_compute_hosting_capacity, (data,)) for data in data_to_process]

with Session(engine) as session:
for result in async_results:
capacity, bus_ = result.get()
session.add(HostingCapacityResult(name=bus_, hoting_capacity_kw=capacity))
session.commit()
pool.map(_compute_hosting_capacity, data_to_process)
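
For context, a minimal sketch of the JSON config this command validates into MultiNodeHostingCapacityInput. The field names are inferred from the attribute accesses visible in this diff (master_dss_file, start_time, profile_start_time, end_time, resolution_min, step_kw, max_kw, num_core, pv_profile, export_sqlite_path); the example values and datetime format are assumptions, since BasicSimulationSettings is collapsed in this view.

# Hypothetical config for nodal_hosting_analysis; field names follow the
# attribute accesses in the diff above, values and datetime format are assumptions.
import json

config = {
    "master_dss_file": "feeder/master.dss",        # OpenDSS master file for the circuit
    "start_time": "2023-06-01 00:00:00",           # simulation start (format assumed)
    "profile_start_time": "2023-01-01 00:00:00",   # profile alignment start
    "end_time": "2023-06-02 00:00:00",             # simulation end
    "resolution_min": 60,                          # timestep in minutes
    "step_kw": 50,                                 # PV capacity increment per trial
    "max_kw": 2000,                                # upper bound of the capacity sweep
    "num_core": 4,                                 # worker processes for the pool
    "pv_profile": "solar_yearly",                  # yearly PV shape referenced by the PVSystem
    "export_sqlite_path": "hosting_capacity.sqlite",
}

with open("nodal_hosting_config.json", "w") as fp:
    json.dump(config, fp, indent=2)

Passing -n bus1,bus2 restricts the sweep to those buses; with --nodes left empty the command sweeps every bus returned by Circuit.AllBusNames().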
58 changes: 58 additions & 0 deletions emerge/cli/nodal_hosting_sqlite_tables.py
@@ -0,0 +1,58 @@

from typing import Optional
from pathlib import Path
from datetime import datetime

from sqlmodel import Field, SQLModel, create_engine


class HostingCapacityReport(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
node_name: str
hosting_capacity_kw: float
sardi_voltage: float
sardi_line: float
sardi_transformer: float
sardi_aggregated: float


class SimulationConvergenceReport(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
capacity_kw: float
node_name: str
convergence: bool
timestamp: datetime


class TotalEnergyReport(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
node_name: str
pv_capacity_kw: float
pv_energy_mwh: float
circuit_energy_mwh: float

class OverloadedLinesReport(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
start_time: datetime
resolution_min: int
node_name: str
line_name: str
loadings: str

class OverloadedTransformersReport(SQLModel, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
start_time: datetime
resolution_min: int
node_name: str
xfmr_name: str
loadings: str

def get_engine(sqlite_file: Path):
return create_engine(f"sqlite:///{str(sqlite_file)}")

def create_table(sqlite_file: Path):
""" Function to create sqlite table."""

engine = create_engine(f"sqlite:///{str(sqlite_file)}")
SQLModel.metadata.create_all(engine)
return engine
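
As a usage sketch (not part of this commit), the per-node results written by the analysis can be read back with the same SQLModel table definitions; the file path is whatever export_sqlite_path pointed to, and hosting_capacity.sqlite below is a placeholder.

# Sketch: read per-node hosting capacity results back from the exported SQLite file.
from sqlmodel import Session, select

from emerge.cli.nodal_hosting_sqlite_tables import HostingCapacityReport, get_engine

engine = get_engine("hosting_capacity.sqlite")
with Session(engine) as session:
    # One row per analyzed bus, written after its capacity sweep completes.
    for report in session.exec(select(HostingCapacityReport)).all():
        print(report.node_name, report.hosting_capacity_kw, report.sardi_aggregated)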