From 8db0aa60a83403c2e94330409198ab474015b469 Mon Sep 17 00:00:00 2001
From: Dylan McDowell
Date: Mon, 13 Nov 2023 14:32:35 -0700
Subject: [PATCH 01/24] Refactor Pyomo Dispatch

---
 src/dispatch/DispatchState.py    |  43 ++
 src/dispatch/PyomoRuleLibrary.py | 296 +++++++++++++
 src/dispatch/putils.py           | 147 +++++++
 src/dispatch/pyomo_dispatch.py   | 697 +++++-------------------------
 4 files changed, 590 insertions(+), 593 deletions(-)
 create mode 100644 src/dispatch/PyomoRuleLibrary.py
 create mode 100644 src/dispatch/putils.py

diff --git a/src/dispatch/DispatchState.py b/src/dispatch/DispatchState.py
index 18b133b3..11cacb15 100644
--- a/src/dispatch/DispatchState.py
+++ b/src/dispatch/DispatchState.py
@@ -199,3 +199,46 @@ def set_activity_vector(self, comp, res, values, tracker='production', start_idx
     r = self._resources[comp][res]
     self._data[f'{comp.name}_{tracker}'][r, start_idx:end_idx] = values
+
+# DispatchState for Pyomo dispatcher
+class PyomoState(DispatchState):
+  def __init__(self):
+    """
+      Constructor.
+      @ In, None
+      @ Out, None
+    """
+    DispatchState.__init__(self)
+    self._model = None # Pyomo model object
+
+  def initialize(self, components, resources_map, times, model):
+    """
+      Connect information about this State to other objects
+      @ In, components, list, HERON components
+      @ In, resources_map, dict, map of component names to resources used
+      @ In, times, np.array, values of "time" this state represents
+      @ In, model, pyomo.Model, associated model for this state
+      @ Out, None
+    """
+    DispatchState.initialize(self, components, resources_map, times)
+    self._model = model
+
+  def get_activity_indexed(self, comp, activity, r, t, valued=True, **kwargs):
+    """
+      Getter for activity level.
+      @ In, comp, HERON Component, component whose information should be retrieved
+      @ In, activity, str, tracking variable name for activity subset
+      @ In, r, int, index of resource to retrieve (as given by meta[HERON][resource_indexer])
+      @ In, t, int, index of time at which activity should be provided
+      @ In, valued, bool, optional, if True then get float value instead of pyomo expression
+      @ In, kwargs, dict, additional pass-through keyword arguments
+      @ Out, activity, float, amount of resource "res" produced/consumed by "comp" at time "time";
+            note positive is producing, negative is consuming
+    """
+    prod = getattr(self._model, f'{comp.name}_{activity}')[r, t]
+    if valued:
+      return prod()
+    return prod
+
+  def set_activity_indexed(self, comp, r, t, value, valued=False):
+    raise NotImplementedError
diff --git a/src/dispatch/PyomoRuleLibrary.py b/src/dispatch/PyomoRuleLibrary.py
new file mode 100644
index 00000000..125c7860
--- /dev/null
+++ b/src/dispatch/PyomoRuleLibrary.py
@@ -0,0 +1,296 @@
+"""
+  Library of standalone Pyomo rule functions (constraints and objective) for the HERON dispatcher.
+"""
+
+import pyomo.environ as pyo
+
+def charge_rule(charge_name, bin_name, large_eps, r, m, t) -> bool:
+  """
+    Constructs pyomo don't-charge-while-discharging constraints.
+    For storage units specifically.
+    @ In, charge_name, str, name of charging variable
+    @ In, bin_name, str, name of forcing binary variable
+    @ In, large_eps, float, a large-ish number w.r.t. storage activity
+    @ In, r, int, index of stored resource (is this always 0?)
+    @ In, m, pyo.ConcreteModel, associated model
+    @ In, t, int, time index for capacity rule
+    @ Out, rule, bool, inequality used to limit charge behavior
+  """
+  charge_var = getattr(m, charge_name)
+  bin_var = getattr(m, bin_name)
+  return -charge_var[r, t] <= (1 - bin_var[r, t]) * large_eps
+
+def discharge_rule(discharge_name, bin_name, large_eps, r, m, t) -> bool:
+  """
+    Constructs pyomo don't-discharge-while-charging constraints.
+    For storage units specifically.
+    @ In, discharge_name, str, name of discharging variable
+    @ In, bin_name, str, name of forcing binary variable
+    @ In, large_eps, float, a large-ish number w.r.t. storage activity
+    @ In, r, int, index of stored resource (is this always 0?)
+    @ In, m, pyo.ConcreteModel, associated model
+    @ In, t, int, time index for capacity rule
+    @ Out, rule, bool, inequality used to limit discharge behavior
+  """
+  discharge_var = getattr(m, discharge_name)
+  bin_var = getattr(m, bin_name)
+  return discharge_var[r, t] <= bin_var[r, t] * large_eps
+
+def level_rule(comp, level_name, charge_name, discharge_name, initial_storage, r, m, t) -> bool:
+  """
+    Constructs pyomo charge-discharge-level balance constraints.
+    For storage units specifically.
+    @ In, comp, Component, storage component of interest
+    @ In, level_name, str, name of level-tracking variable
+    @ In, charge_name, str, name of charging variable
+    @ In, discharge_name, str, name of discharging variable
+    @ In, initial_storage, dict, initial storage levels by component
+    @ In, r, int, index of stored resource (is this always 0?)
+    @ In, m, pyo.ConcreteModel, associated model
+    @ In, t, int, time index for capacity rule
+    @ Out, rule, bool, inequality used to limit level behavior
+  """
+  level_var = getattr(m, level_name)
+  charge_var = getattr(m, charge_name)
+  discharge_var = getattr(m, discharge_name)
+  if t > 0:
+    previous = level_var[r, t-1]
+    dt = m.Times[t] - m.Times[t-1]
+  else:
+    previous = initial_storage[comp]
+    dt = m.Times[1] - m.Times[0]
+  rte2 = comp.get_sqrt_RTE() # square root of the round-trip efficiency
+  production = - rte2 * charge_var[r, t] - discharge_var[r, t] / rte2
+  return level_var[r, t] == previous + production * dt
+
+def periodic_level_rule(comp, level_name, initial_storage, r, m, t) -> bool:
+  """
+    Mandates storage units end with the same level they start with, which prevents
+    "free energy" or "free sink" due to initial starting levels.
+    For storage units specifically.
+    @ In, comp, Component, storage component of interest
+    @ In, level_name, str, name of level-tracking variable
+    @ In, initial_storage, dict, initial storage levels by component
+    @ In, r, int, index of stored resource (is this always 0?)
+    @ In, m, pyo.ConcreteModel, associated model
+    @ In, t, int, time index for capacity rule
+    @ Out, rule, bool, inequality used to limit level behavior
+  """
+  return getattr(m, level_name)[r, m.T[-1]] == initial_storage[comp]
+
+def capacity_rule(prod_name, r, caps, m, t) -> bool:
+  """
+    Constructs pyomo capacity constraints.
+    @ In, prod_name, str, name of production variable
+    @ In, r, int, index of resource for capacity constraining
+    @ In, caps, list(float), value to constrain resource at in time
+    @ In, m, pyo.ConcreteModel, associated model
+    @ In, t, int, time index for capacity rule
+  """
+  kind = 'lower' if min(caps) < 0 else 'upper'
+  return prod_limit_rule(prod_name, r, caps, kind, t, m)
+
+def prod_limit_rule(prod_name, r, limits, kind, t, m) -> bool:
+  """
+    Constructs pyomo production constraints.
+ @ In, prod_name, str, name of production variable + @ In, r, int, index of resource for capacity constraining + @ In, limits, list(float), values in time at which to constrain resource production + @ In, kind, str, either 'upper' or 'lower' for limiting production + @ In, t, int, time index for production rule (NOTE not pyomo index, rather fixed index) + @ In, m, pyo.ConcreteModel, associated model + """ + prod = getattr(m, prod_name) + if kind == 'lower': + # production must exceed value + return prod[r, t] >= limits[t] + elif kind == 'upper': + return prod[r, t] <= limits[t] + else: + raise TypeError('Unrecognized production limit "kind":', kind) + +def ramp_rule_down(prod_name, r, limit, neg_cap, t, m, bins=None) -> bool: + """ + Constructs pyomo production ramping constraints for reducing production level. + Note that this is number-getting-less-positive for positive-defined capacity, while + it is number-getting-less-negative for negative-defined capacity. + This means that dQ is negative for positive-defined capacity, but positive for vice versa + @ In, prod_name, str, name of production variable + @ In, r, int, index of resource for capacity constraining + @ In, limit, float, limiting change in production level across time steps. NOTE: negative for negative-defined capacity. + @ In, neg_cap, bool, True if capacity is expressed as negative (consumer) + @ In, t, int, time index for ramp limit rule (NOTE not pyomo index, rather fixed index) + @ In, m, pyo.ConcreteModel, associated model + @ In, bins, tuple, optional, (lower, upper, steady) binaries if limiting ramp frequency + @ Out, rule, expression, evaluation for Pyomo constraint + """ + prod = getattr(m, prod_name) + if t == 0: + return pyo.Constraint.Skip + delta = prod[r, t] - prod[r, t-1] + # special treatment if we have frequency-limiting binaries available + if bins is None: + if neg_cap: + # NOTE change in production should be "less positive" than the max + # "negative decrease" in production (decrease is positive when defined by consuming) + return delta <= - limit + else: + # dq is negative, - limit is negative + return delta >= - limit + else: + eps = 1.0 # aux parameter to force binaries to behave, TODO needed? + down = bins[0][t] + up = bins[1][t] + # NOTE we're following the convention that "less negative" is ramping "down" + # for capacity defined by consumption + # e.g. consuming 100 ramps down to consuming 70 is (-100 -> -70), dq = 30 + if neg_cap: + # dq <= limit * dt * Bu + eps * Bd, if limit <= 0 + # dq is positive, - limit is positive + return delta <= - limit * down - eps * up + else: + # dq <= limit * dt * Bu - eps * Bd, if limit >= 0 + # dq is negative, - limit is negative + return delta >= - limit * down + eps * up + +def ramp_rule_up(prod_name, r, limit, neg_cap, t, m, bins=None) -> bool: + """ + Constructs pyomo production ramping constraints. 
+ @ In, prod_name, str, name of production variable + @ In, r, int, index of resource for capacity constraining + @ In, limit, float, limiting change in production level across time steps + @ In, neg_cap, bool, True if capacity is expressed as negative (consumer) + @ In, t, int, time index for ramp limit rule (NOTE not pyomo index, rather fixed index) + @ In, m, pyo.ConcreteModel, associated model + @ In, bins, tuple, optional, (lower, steady, upper) binaries if limiting ramp frequency + """ + prod = getattr(m, prod_name) + if t == 0: + return pyo.Constraint.Skip + delta = prod[r, t] - prod[r, t-1] + if bins is None: + if neg_cap: + # NOTE change in production should be "more positive" than the max + # "negative increase" in production (increase is negative when defined by consuming) + return delta >= limit + else: + # change in production should be less than the max production increase + return delta <= limit + else: + # special treatment if we have frequency-limiting binaries available + eps = 1.0 # aux parameter to force binaries to behave, TODO needed? + down = bins[0][t] + up = bins[1][t] + # NOTE we're following the convention that "more negative" is ramping "up" + # for capacity defined by consumption + # e.g. consuming 100 ramps up to consuming 130 is (-100 -> -130), dq = -30 + if neg_cap: + # dq >= limit * dt * Bu + eps * Bd, if limit <= 0 + return delta >= limit * up + eps * down + else: + # dq <= limit * dt * Bu - eps * Bd, if limit >= 0 + return delta <= limit * up - eps * down + +def ramp_freq_rule(Bd, Bu, tao, t, m) -> bool: + """ + Constructs pyomo frequency-of-ramp constraints. + @ In, Bd, bool var, binary tracking down-ramp events + @ In, Bu, bool var, binary tracking up-ramp events + @ In, tao, int, number of time steps to look back + @ In, t, int, time step indexer + @ In, m, pyo.ConcreteModel, pyomo model + """ + if t == 0: + return pyo.Constraint.Skip + # looking-back-window shouldn't be longer than existing time + tao = min(t, tao) + # how many ramp-down events in backward window? + tally = sum(1 - Bd[tm] for tm in range(t - tao, t)) + # only allow ramping up if no rampdowns in back window + ## but we can't use if statements, so use binary math + return Bu[t] <= 1 / tao * tally + +def ramp_freq_bins_rule(Bd, Bu, Bn, t, m) -> bool: + """ + Constructs pyomo constraint for ramping event tracking variables. + This forces choosing between ramping up, ramping down, and steady state operation. + @ In, Bd, bool var, binary tracking down-ramp events + @ In, Bu, bool var, binary tracking up-ramp events + @ In, Bn, bool var, binary tracking no-ramp events + @ In, t, int, time step indexer + @ In, m, pyo.ConcreteModel, pyomo model + """ + return Bd[t] + Bu[t] + Bn[t] == 1 + +def cashflow_rule(compute_cashflows, meta, m) -> float: + """ + Objective function rule. + @ In, meta, dict, additional variable passthrough + @ In, m, pyo.ConcreteModel, associated model + @ Out, total, float, evaluation of cost + """ + activity = m.Activity + state_args = {'valued': False} + total = compute_cashflows(m.Components, activity, m.Times, meta, state_args=state_args, time_offset=m.time_offset) + return total + +def conservation_rule(res, m, t) -> bool: + """ + Constructs conservation constraints. 
+    @ In, res, str, name of resource
+    @ In, m, pyo.ConcreteModel, associated model
+    @ In, t, int, index of time variable
+    @ Out, conservation, bool, balance check
+  """
+  balance = 0 # sum of production rates, which needs to be zero
+  for comp, res_dict in m.resource_index_map.items():
+    if res in res_dict:
+      # activity information depends on if storage or component
+      r = res_dict[res]
+      intr = comp.get_interaction()
+      if intr.is_type('Storage'):
+        # Storages have 3 variables: level, charge, and discharge
+        # -> so calculate activity
+        charge = getattr(m, f'{comp.name}_charge')
+        discharge = getattr(m, f'{comp.name}_discharge')
+        # note that "charge" is negative (as it's consuming) and discharge is positive
+        # -> so the intuitive |discharge| - |charge| becomes discharge + charge
+        production = discharge[r, t] + charge[r, t]
+      else:
+        var = getattr(m, f'{comp.name}_production')
+        # TODO move to this? balance += m._activity.get_activity(comp, res, t)
+        production = var[r, t]
+      balance += production
+  return balance == 0 # TODO tol?
+
+def min_prod_rule(prod_name, r, caps, minimums, m, t) -> bool:
+  """
+    Constructs minimum production constraint
+    @ In, prod_name, str, name of production variable
+    @ In, r, int, index of resource for capacity constraining
+    @ In, caps, list(float), capacity(t) value for component
+    @ In, minimums, list(float), minimum allowable production in time
+    @ In, m, pyo.ConcreteModel, associated model
+    @ In, t, int, index of time variable
+    @ Out, minimum, bool, min check
+  """
+  prod = getattr(m, prod_name)
+  # negative capacity means consuming instead of producing
+  if max(caps) > 0:
+    return prod[r, t] >= minimums[t]
+  else:
+    return prod[r, t] <= minimums[t]
+
+def transfer_rule(ratio, r, ref_r, prod_name, m, t) -> bool:
+  """
+    Constructs transfer function constraints
+    @ In, ratio, float, ratio for resource to nominal first resource
+    @ In, r, int, index of transfer resource
+    @ In, ref_r, int, index of reference resource
+    @ In, prod_name, str, name of production variable
+    @ In, m, pyo.ConcreteModel, associated model
+    @ In, t, int, index of time variable
+    @ Out, transfer, bool, transfer ratio check
+  """
+  prod = getattr(m, prod_name)
+  return prod[r, t] == prod[ref_r, t] * ratio # TODO tolerance??
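[review note] Since these rules are now free functions that take arguments beyond the (model, index) pair Pyomo supplies to a Constraint rule, callers must bind the extras with a lambda, as pyomo_dispatch.py does below. A minimal standalone sketch of that pattern, with hypothetical model and variable names, assuming src/dispatch is importable as a package (inside HERON the import is the relative `from . import PyomoRuleLibrary as prl`):

```python
# Minimal sketch: bind the extra rule arguments in a lambda so that Pyomo's
# Constraint machinery only needs to supply (model, t).
import pyomo.environ as pyo
from dispatch import PyomoRuleLibrary as prl  # assumed import path

m = pyo.ConcreteModel()
m.R = pyo.Set(initialize=[0])
m.T = pyo.Set(initialize=range(4))
m.gen_production = pyo.Var(m.R, m.T, initialize=0.0)  # hypothetical variable

caps = [100.0, 100.0, 80.0, 100.0]  # per-step capacity for resource index 0
rule = lambda mod, t: prl.capacity_rule('gen_production', 0, caps, mod, t)
m.gen_capacity_constr = pyo.Constraint(m.T, rule=rule)  # prod[0, t] <= caps[t]
```

Because the model is concrete, each Constraint is constructed (and the lambda evaluated) as soon as it is assigned to the model, so the usual late-binding pitfall of lambdas created inside loops does not bite in the loops the dispatcher uses below.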
diff --git a/src/dispatch/putils.py b/src/dispatch/putils.py new file mode 100644 index 00000000..04d69a99 --- /dev/null +++ b/src/dispatch/putils.py @@ -0,0 +1,147 @@ +""" +""" +import numpy as np +import pyomo.environ as pyo + +def get_prod_bounds(m, comp, meta): + """ + Determines the production limits of the given component + @ In, comp, HERON component, component to get bounds of + @ In, meta, dict, additional state information + @ Out, lower, dict, lower production limit (might be negative for consumption) + @ Out, upper, dict, upper production limit (might be 0 for consumption) + """ + raise NotImplementedError + # cap_res = comp.get_capacity_var() # name of resource that defines capacity + # r = m.resource_index_map[comp][cap_res] + # maxs = [] + # mins = [] + # for t, time in enumerate(m.Times): + # meta['HERON']['time_index'] = t + m.time_offset + # cap = comp.get_capacity(meta)[0][cap_res] + # low = comp.get_minimum(meta)[0][cap_res] + # maxs.append(cap) + # if (comp.is_dispatchable() == 'fixed') or (low == cap): + # low = cap + # # initialize values to avoid boundary errors + # var = getattr(m, prod_name) + # values = var.get_values() + # for k in values: + # values[k] = cap + # var.set_values(values) + # mins.append(low) + # maximum = comp.get_capacity(None, None, None, None)[0][cap_res] + # # TODO minimum! + # # producing or consuming the defining resource? + # # -> put these in dictionaries so we can "get" limits or return None + # if maximum > 0: + # lower = {r: 0} + # upper = {r: maximum} + # else: + # lower = {r: maximum} + # upper = {r: 0} + # return lower, upper + +def get_transfer_coeffs(m, comp) -> dict: + """ + Obtains transfer function ratios (assuming Linear ValuedParams) + Form: 1A + 3B -> 2C + 4D + Ratios are calculated with respect to first resource listed, so e.g. B = 3/1 * A + TODO I think we can handle general external functions, maybe? + @ In, m, pyo.ConcreteModel, associated model + @ In, comp, HERON component, component to get coefficients of + @ Out, ratios, dict, ratios of transfer function variables + """ + transfer = comp.get_interaction().get_transfer() + if transfer is None: + return {} + + # linear transfer coefficients, dict as {resource: coeff}, SIGNS MATTER + # it's all about ratios -> store as ratio of resource / first resource (arbitrary) + coeffs = transfer.get_coefficients() + coeffs_iter = iter(coeffs.items()) + first_name, first_coef = next(coeffs_iter) + first_r = m.resource_index_map[comp][first_name] + ratios = {'__reference': (first_r, first_name, first_coef)} + + for resource, coef in coeffs_iter: + ratios[resource] = coef / first_coef + + return ratios + +def retrieve_solution(m) -> dict: + """ + Extracts solution from Pyomo optimization + @ In, m, pyo.ConcreteModel, associated (solved) model + @ Out, result, dict, {comp: {activity: {resource: [production]}} e.g. generator[production][steam] + """ + return { + component.name: { + tag: retrieve_value_from_model(m, component, tag) + for tag in component.get_tracking_vars() + } + for component in m.Components + } + +def retrieve_value_from_model(m, comp, tag) -> dict: + """ + Retrieve values of a series from the pyomo model. 
+ @ In, m, pyo.ConcreteModel, associated (solved) model + @ In, comp, Component, relevant component + @ In, tag, str, particular history type to retrieve + @ Out, result, dict, {resource: [array], etc} + """ + result = {} + prod = getattr(m, f'{comp.name}_{tag}') + kind = 'Var' if isinstance(prod, pyo.Var) else 'Param' + for res, comp_r in m.resource_index_map[comp].items(): + if kind == 'Var': + result[res] = np.array([prod[comp_r, t].value for t in m.T], dtype=float) + elif kind == 'Param': + result[res] = np.array([prod[comp_r, t] for t in m.T], dtype=float) + return result + +### DEBUG +def debug_pyomo_print(m) -> None: + """ + Prints the setup pieces of the Pyomo model + @ In, m, pyo.ConcreteModel, model to interrogate + @ Out, None + """ + print('/' + '='*80) + print('DEBUGG model pieces:') + print(' -> objective:') + print(' ', m.obj.pprint()) + print(' -> variables:') + for var in m.component_objects(pyo.Var): + print(' ', var.pprint()) + print(' -> constraints:') + for constr in m.component_objects(pyo.Constraint): + print(' ', constr.pprint()) + print('\\' + '='*80) + print('') + +def debug_print_soln(m) -> None: + """ + Prints the solution from the Pyomo model + @ In, m, pyo.ConcreteModel, model to interrogate + @ Out, None + """ + output = ['*' * 80, "DEBUGG solution:", f' objective value: {m.obj()}'] + for c, comp in enumerate(m.Components): + name = comp.name + output.append(f' component: {c} {name}') + for tracker in comp.get_tracking_vars(): + prod = getattr(m, f'{name}_{tracker}') + kind = 'Var' if isinstance(prod, pyo.Var) else 'Param' + for res, r in m.resource_index_map[comp].items(): + output.append(f' tracker: {tracker} resource {r}: {res}') + for t, time in enumerate(m.Times): + if kind == 'Var': + value = prod[r, t].value + elif kind == 'Param': + value = prod[r, t] + output.append(f' time: {t + m.time_offset} {time} {value}') + + output.append('*' * 80) + print('\n'.join(output)) \ No newline at end of file diff --git a/src/dispatch/pyomo_dispatch.py b/src/dispatch/pyomo_dispatch.py index 88477867..db409ba7 100644 --- a/src/dispatch/pyomo_dispatch.py +++ b/src/dispatch/pyomo_dispatch.py @@ -4,33 +4,27 @@ """ pyomo-based dispatch strategy """ - -import os -import sys import time as time_mod import platform -from itertools import compress import pprint - import numpy as np +import pyutilib.subprocess.GlobalData +from itertools import compress + import pyomo.environ as pyo from pyomo.opt import SolverStatus, TerminationCondition - -from ravenframework.utils import InputData, InputTypes - -# allows pyomo to solve on threaded processes -import pyutilib.subprocess.GlobalData -pyutilib.subprocess.GlobalData.DEFINE_SIGNAL_HANDLERS_DEFAULT = False from pyomo.common.errors import ApplicationError +from ravenframework.utils import InputData, InputTypes +from . import PyomoRuleLibrary as prl +from . import putils from .Dispatcher import Dispatcher -from .DispatchState import DispatchState, NumpyState -try: - import _utils as hutils -except (ModuleNotFoundError, ImportError): - sys.path.append(os.path.join(os.path.dirname(__file__), '..')) - import _utils as hutils +from .DispatchState import DispatchState, NumpyState, PyomoState +from .. 
import _utils as hutils + +# allows pyomo to solve on threaded processes +pyutilib.subprocess.GlobalData.DEFINE_SIGNAL_HANDLERS_DEFAULT = False # Choose solver; CBC is a great choice unless we're on Windows if platform.system() == 'Windows': SOLVERS = ['glpk', 'cbc', 'ipopt'] @@ -252,20 +246,51 @@ def get_solver(self): return self._solver ### INTERNAL - def dispatch_window(self, time, time_offset, - case, components, sources, resources, - initial_storage, meta): + @staticmethod + def _calculate_deltas(activity, initial_level): + """ + """ + deltas = np.zeros(len(activity)) + deltas[1:] = activity[1:] - activity[:-1] + deltas[0] = activity[0] - initial_level + return deltas + + def _process_storage_component(self, m, comp, intr, meta): + """ + """ + activity = intr.get_strategy().evaluate(meta)[0]["level"] + self._create_production_param(m, comp, activity, tag="level") + dt = m.Times[1] - m.Times[0] + rte2 = comp.get_sqrt_RTE() + deltas = self._calculate_deltas(activity, intr.get_initial_level(meta)) + charge = np.where(deltas > 0, -deltas / dt / rte2, 0) + discharge = np.where(deltas < 0, -deltas / dt * rte2, 0) + self._create_production_param(m, comp, charge, tag="charge") + self._create_production_param(m, comp, discharge, tag="discharge") + + def _process_components(self, m, comp, time, initial_storage, meta): + """ + """ + intr = comp.get_interaction() + if intr.is_governed(): + self._process_governed_component(m, comp, time, intr, meta) + elif intr.is_type("Storage"): + self._create_storage(m, comp, initial_storage, meta) + else: + self._create_production(m, comp, meta) + + def _process_governed_component(self, m, comp, time, intr, meta): + """ + """ + meta["request"] = {"component": comp, "time": time} + if intr.is_type("Storage"): + self._process_storage_component(m, comp, intr, meta) + else: + activity = intr.get_strategy().evaluate(meta)[0]['level'] + self._create_production_param(m, comp, activity) + + def _build_pyomo_model(self, time, time_offset, case, components, resources, meta): """ - Dispatches one part of a rolling window. - @ In, time, np.array, value of time to evaluate - @ In, time_offset, int, offset of the time index in the greater history - @ In, case, HERON Case, Case that this dispatch is part of - @ In, components, list, HERON components available to the dispatch - @ In, sources, list, HERON source (placeholders) for signals - @ In, resources, list, sorted list of all resources in problem - @ In, initial_storage, dict, initial storage levels if any - @ In, meta, dict, additional variables passed through - @ Out, result, dict, results of window dispatch """ # build the Pyomo model # TODO abstract this model as much as possible BEFORE, then concrete initialization per window @@ -279,7 +304,6 @@ def dispatch_window(self, time, time_offset, m.R = pyo.Set(initialize=R) m.T = pyo.Set(initialize=T) m.Times = time - dt = m.Times[1] - m.Times[0] # TODO assumes consistent step sizing m.time_offset = time_offset m.resource_index_map = meta['HERON']['resource_indexer'] # maps the resource to its index WITHIN APPLICABLE components (sparse matrix) # e.g. component: {resource: local index}, ... 
etc} @@ -288,56 +312,44 @@ def dispatch_window(self, time, time_offset, m.Components = components m.Activity = PyomoState() m.Activity.initialize(m.Components, m.resource_index_map, m.Times, m) + return m + + def _populate_pyomo_model(self, model, components, initial_storage, time, resources, meta): + """ + """ # constraints and variables for comp in components: - # components using a governing strategy (not opt) are Parameters, not Variables - # TODO should this come BEFORE or AFTER each dispatch opt solve? - # -> responsive or proactive? - intr = comp.get_interaction() - if intr.is_governed(): - meta['request'] = {'component': comp, 'time': time} - activity = intr.get_strategy().evaluate(meta)[0]['level'] - if intr.is_type('Storage'): - self._create_production_param(m, comp, activity, tag='level') - # set up "activity" rates (change in level over time, plus efficiency) - rte2 = comp.get_sqrt_RTE() # square root of the round-trip efficiency - L = len(activity) - deltas = np.zeros(L) - deltas[1:] = activity[1:] - activity[:-1] - deltas[0] = activity[0] - intr.get_initial_level(meta) - # rate of charge - # change sign, since increasing level means absorbing energy from system - # also scale by RTE, since to get level increase you have to over-absorb - charge = np.zeros(L) - charge_mask = np.where(deltas > 0) - charge[charge_mask] = - deltas[charge_mask] / dt / rte2 - self._create_production_param(m, comp, charge, tag='charge') - # rate of discharge - # change sign, since decreasing level means emitting energy into system - # also scale by RTE, since level decrease yields less to system - discharge = np.zeros(L) - discharge_mask = np.where(deltas < 0) - discharge[discharge_mask] = - deltas[discharge_mask] / dt * rte2 - self._create_production_param(m, comp, discharge, tag='discharge') - else: - self._create_production_param(m, comp, activity) - continue - # NOTE: "fixed" components could hypothetically be treated differently - ## however, in order for the "production" variable for components to be treatable as the - ## same as other production variables, we create components with limitation - ## lowerbound == upperbound == capacity (just for "fixed" dispatch components) - if intr.is_type('Storage'): - self._create_storage(m, comp, initial_storage, meta) - else: - self._create_production(m, comp, meta) # variables - self._create_conservation(m, resources, initial_storage, meta) # conservation of resources (e.g. production == consumption) - self._create_objective(meta, m) # objective + self._process_components(model, comp, time, initial_storage, meta) + self._create_conservation(model, resources, initial_storage, meta) # conservation of resources (e.g. production == consumption) + self._create_objective(meta, model) # objective + + def dispatch_window(self, time, time_offset, case, components, sources, resources, initial_storage, meta): + """ + Dispatches one part of a rolling window. 
+ @ In, time, np.array, value of time to evaluate + @ In, time_offset, int, offset of the time index in the greater history + @ In, case, HERON Case, Case that this dispatch is part of + @ In, components, list, HERON components available to the dispatch + @ In, sources, list, HERON source (placeholders) for signals + @ In, resources, list, sorted list of all resources in problem + @ In, initial_storage, dict, initial storage levels if any + @ In, meta, dict, additional variables passed through + @ Out, result, dict, results of window dispatch + """ + model = self._build_pyomo_model(time, time_offset, case, components, resources, meta) + self._populate_pyomo_model(model, components, initial_storage, time, resources, meta) + result = self._solve_dispatch(model, meta) + return result + + def _solve_dispatch(self, m, meta): + """ + """ # start a solution search done_and_checked = False attempts = 0 # DEBUGG show variables, bounds if self.debug_mode: - self._debug_pyomo_print(m) + putils.debug_pyomo_print(m) while not done_and_checked: attempts += 1 print(f'DEBUGG solve attempt {attempts} ...:') @@ -350,7 +362,7 @@ def dispatch_window(self, time, time_offset, print('DEBUGG ... solve was unsuccessful!') print('DEBUGG ... status:', soln.solver.status) print('DEBUGG ... termination:', soln.solver.termination_condition) - self._debug_pyomo_print(m) + putils.debug_pyomo_print(m) print('Resource Map:') pprint.pprint(m.resource_index_map) raise RuntimeError(f"Solve was unsuccessful! Status: {soln.solver.status} Termination: {soln.solver.termination_condition}") @@ -373,9 +385,9 @@ def dispatch_window(self, time, time_offset, raise RuntimeError('Exceeded validation attempt limit!') if self.debug_mode: soln.write() - self._debug_print_soln(m) + putils.debug_print_soln(m) # return dict of numpy arrays - result = self._retrieve_solution(m) + result = putils.retrieve_solution(m) return result def check_converged(self, new, old, components): @@ -437,7 +449,7 @@ def _create_production_limit(self, m, validation): limits[t] = limit limit_type = validation['limit_type'] prod_name = f'{comp.name}_production' - rule = lambda mod: self._prod_limit_rule(prod_name, r, limits, limit_type, t, mod) + rule = lambda mod: prl.prod_limit_rule(prod_name, r, limits, limit_type, t, mod) constr = pyo.Constraint(rule=rule) counter = 1 name_template = f'{comp.name}_{resource}_{t}_vld_limit_constr_{{i}}' @@ -575,28 +587,23 @@ def _create_ramp_limit(self, m, comp, prod_name, meta): else: ramp_trackers = None # limit production changes when ramping down - ramp_rule_down = lambda mod, t: \ - self._ramp_rule_down(prod_name, r, limit_delta, neg_cap, t, mod, - bins=ramp_trackers) + ramp_rule_down = lambda mod, t: prl.ramp_rule_down(prod_name, r, limit_delta, neg_cap, t, mod, bins=ramp_trackers) constr = pyo.Constraint(m.T, rule=ramp_rule_down) setattr(m, f'{comp.name}_ramp_down_constr', constr) # limit production changes when ramping up - ramp_rule_up = lambda mod, t: \ - self._ramp_rule_up(prod_name, r, limit_delta, neg_cap, t, mod, - bins=ramp_trackers) + ramp_rule_up = lambda mod, t: prl.ramp_rule_up(prod_name, r, limit_delta, neg_cap, t, mod, bins=ramp_trackers) constr = pyo.Constraint(m.T, rule=ramp_rule_up) setattr(m, f'{comp.name}_ramp_up_constr', constr) # if ramping frequency limit, impose binary constraints if comp.ramp_freq: # binaries rule, for exclusive choice up/down/steady - binaries_rule = lambda mod, t: \ - self._ramp_freq_bins_rule(down, up, steady, t, mod) + binaries_rule = lambda mod, t: 
prl.ramp_freq_bins_rule(down, up, steady, t, mod) constr = pyo.Constraint(m.T, rule=binaries_rule) setattr(m, f'{comp.name}_ramp_freq_binaries', constr) # limit frequency of ramping # TODO calculate "tao" window using ramp freq and dt # -> for now, just use the integer for number of windows - freq_rule = lambda mod, t: self._ramp_freq_rule(down, up, comp.ramp_freq, t, m) + freq_rule = lambda mod, t: prl.ramp_freq_rule(down, up, comp.ramp_freq, t, m) constr = pyo.Constraint(m.T, rule=freq_rule) setattr(m, f'{comp.name}_ramp_freq_constr', constr) @@ -613,11 +620,11 @@ def _create_capacity_constraints(self, m, comp, prod_name, meta): r = m.resource_index_map[comp][cap_res] # production index of the governing resource caps, mins = self._find_production_limits(m, comp, meta) # capacity - max_rule = lambda mod, t: self._capacity_rule(prod_name, r, caps, mod, t) + max_rule = lambda mod, t: prl.capacity_rule(prod_name, r, caps, mod, t) constr = pyo.Constraint(m.T, rule=max_rule) setattr(m, f'{comp.name}_{cap_res}_capacity_constr', constr) # minimum - min_rule = lambda mod, t: self._min_prod_rule(prod_name, r, caps, mins, mod, t) + min_rule = lambda mod, t: prl.min_prod_rule(prod_name, r, caps, mins, mod, t) constr = pyo.Constraint(m.T, rule=min_rule) # set initial conditions for t, time in enumerate(m.Times): @@ -674,12 +681,12 @@ def _create_transfer(self, m, comp, prod_name): # TODO this could also take a transfer function from an external Python function assuming # we're careful about how the expression-vs-float gets used # and figure out how to handle multiple ins, multiple outs - ratios = self._get_transfer_coeffs(m, comp) + ratios = putils.get_transfer_coeffs(m, comp) ref_r, ref_name, _ = ratios.pop('__reference', (None, None, None)) for resource, ratio in ratios.items(): r = m.resource_index_map[comp][resource] rule_name = f'{name}_{resource}_{ref_name}_transfer' - rule = lambda mod, t: self._transfer_rule(ratio, r, ref_r, prod_name, mod, t) + rule = lambda mod, t: prl.transfer_rule(ratio, r, ref_r, prod_name, mod, t) constr = pyo.Constraint(m.T, rule=rule) setattr(m, rule_name, constr) @@ -706,13 +713,12 @@ def _create_storage(self, m, comp, initial_storage, meta): discharge_name = self._create_production_variable(m, comp, meta, tag='discharge', add_bounds=False, within=pyo.NonNegativeReals) # balance level, charge/discharge level_rule_name = prefix + '_level_constr' - rule = lambda mod, t: self._level_rule(comp, level_name, charge_name, discharge_name, - initial_storage, r, mod, t) + rule = lambda mod, t: prl.level_rule(comp, level_name, charge_name, discharge_name, initial_storage, r, mod, t) setattr(m, level_rule_name, pyo.Constraint(m.T, rule=rule)) # periodic boundary condition for storage level if comp.get_interaction().apply_periodic_level: periodic_rule_name = prefix + '_level_periodic_constr' - rule = lambda mod, t: self._periodic_level_rule(comp, level_name, initial_storage, r, mod, t) + rule = lambda mod, t: prl.periodic_level_rule(comp, level_name, initial_storage, r, mod, t) setattr(m, periodic_rule_name, pyo.Constraint(m.T, rule=rule)) # (4) a binary variable to track whether we're charging or discharging, to prevent BOTH happening @@ -730,10 +736,10 @@ def _create_storage(self, m, comp, initial_storage, meta): large_eps = 1e8 #0.01 * sys.float_info.max # charging constraint: don't charge while discharging (note the sign matters) charge_rule_name = prefix + '_charge_constr' - rule = lambda mod, t: self._charge_rule(charge_name, bin_name, large_eps, r, mod, t) + rule = lambda 
mod, t: prl.charge_rule(charge_name, bin_name, large_eps, r, mod, t) setattr(m, charge_rule_name, pyo.Constraint(m.T, rule=rule)) discharge_rule_name = prefix + '_discharge_constr' - rule = lambda mod, t: self._discharge_rule(discharge_name, bin_name, large_eps, r, mod, t) + rule = lambda mod, t: prl.discharge_rule(discharge_name, bin_name, large_eps, r, mod, t) setattr(m, discharge_rule_name, pyo.Constraint(m.T, rule=rule)) def _create_conservation(self, m, resources, initial_storage, meta): @@ -746,7 +752,7 @@ def _create_conservation(self, m, resources, initial_storage, meta): @ Out, None """ for resource in resources: - rule = lambda mod, t: self._conservation_rule(initial_storage, meta, resource, mod, t) + rule = lambda mod, t: prl.conservation_rule(resource, mod, t) constr = pyo.Constraint(m.T, rule=rule) setattr(m, f'{resource}_conservation', constr) @@ -758,500 +764,5 @@ def _create_objective(self, meta, m): @ Out, None """ # cashflow eval - rule = lambda mod: self._cashflow_rule(meta, mod) + rule = lambda mod: prl.cashflow_rule(self._compute_cashflows, meta, mod) m.obj = pyo.Objective(rule=rule, sense=pyo.maximize) - - ### UTILITIES for general use - def _get_prod_bounds(self, m, comp, meta): - """ - Determines the production limits of the given component - @ In, comp, HERON component, component to get bounds of - @ In, meta, dict, additional state information - @ Out, lower, dict, lower production limit (might be negative for consumption) - @ Out, upper, dict, upper production limit (might be 0 for consumption) - """ - raise NotImplementedError - cap_res = comp.get_capacity_var() # name of resource that defines capacity - r = m.resource_index_map[comp][cap_res] - maxs = [] - mins = [] - for t, time in enumerate(m.Times): - meta['HERON']['time_index'] = t + m.time_offset - cap = comp.get_capacity(meta)[0][cap_res] - low = comp.get_minimum(meta)[0][cap_res] - maxs.append(cap) - if (comp.is_dispatchable() == 'fixed') or (low == cap): - low = cap - # initialize values to avoid boundary errors - var = getattr(m, prod_name) - values = var.get_values() - for k in values: - values[k] = cap - var.set_values(values) - mins.append(low) - maximum = comp.get_capacity(None, None, None, None)[0][cap_res] - # TODO minimum! - # producing or consuming the defining resource? - # -> put these in dictionaries so we can "get" limits or return None - if maximum > 0: - lower = {r: 0} - upper = {r: maximum} - else: - lower = {r: maximum} - upper = {r: 0} - return lower, upper - - def _get_transfer_coeffs(self, m, comp): - """ - Obtains transfer function ratios (assuming Linear ValuedParams) - Form: 1A + 3B -> 2C + 4D - Ratios are calculated with respect to first resource listed, so e.g. B = 3/1 * A - TODO I think we can handle general external functions, maybe? 
- @ In, m, pyo.ConcreteModel, associated model - @ In, comp, HERON component, component to get coefficients of - @ Out, ratios, dict, ratios of transfer function variables - """ - name = comp.name - transfer = comp.get_interaction().get_transfer() # get the transfer ValuedParam, if any - if transfer is None: - return {} - coeffs = transfer.get_coefficients() # linear transfer coefficients, dict as {resource: coeff}, SIGNS MATTER - # it's all about ratios -> store as ratio of resource / first resource (arbitrary) - first_r = None - first_name = None - first_coef = None - ratios = {} - for resource, coef in coeffs.items(): - r = m.resource_index_map[comp][resource] - if first_r is None: - # set up nominal resource to compare to - first_r = r - first_name = resource - first_coef = coef - ratios['__reference'] = (first_r, first_name, first_coef) - continue - ratio = coef / first_coef - ratios[resource] = ratio - return ratios - - def _retrieve_solution(self, m): - """ - Extracts solution from Pyomo optimization - @ In, m, pyo.ConcreteModel, associated (solved) model - @ Out, result, dict, {comp: {activity: {resource: [production]}} e.g. generator[production][steam] - """ - result = {} # {component: {activity_tag: {resource: production}}} - for comp in m.Components: - result[comp.name] = {} - for tag in comp.get_tracking_vars(): - tag_values = self._retrieve_value_from_model(m, comp, tag) - result[comp.name][tag] = tag_values - return result - - def _retrieve_value_from_model(self, m, comp, tag): - """ - Retrieve values of a series from the pyomo model. - @ In, m, pyo.ConcreteModel, associated (solved) model - @ In, comp, Component, relevant component - @ In, tag, str, particular history type to retrieve - @ Out, result, dict, {resource: [array], etc} - """ - result = {} - prod = getattr(m, f'{comp.name}_{tag}') - if isinstance(prod, pyo.Var): - kind = 'Var' - elif isinstance(prod, pyo.Param): - kind = 'Param' - for res, comp_r in m.resource_index_map[comp].items(): - if kind == 'Var': - result[res] = np.fromiter((prod[comp_r, t].value for t in m.T), dtype=float, count=len(m.T)) - elif kind == 'Param': - result[res] = np.fromiter((prod[comp_r, t] for t in m.T), dtype=float, count=len(m.T)) - return result - - ### RULES for lambda function calls - # these get called using lambda functions to make Pyomo constraints, vars, objectives, etc - - def _charge_rule(self, charge_name, bin_name, large_eps, r, m, t): - """ - Constructs pyomo don't-charge-while-discharging constraints. - For storage units specificially. - @ In, charge_name, str, name of charging variable - @ In, bin_name, str, name of forcing binary variable - @ In, large_eps, float, a large-ish number w.r.t. storage activity - @ In, r, int, index of stored resource (is this always 0?) - @ In, m, pyo.ConcreteModel, associated model - @ In, t, int, time index for capacity rule - @ Out, rule, bool, inequality used to limit charge behavior - """ - charge_var = getattr(m, charge_name) - bin_var = getattr(m, bin_name) - return -charge_var[r, t] <= (1 - bin_var[r, t]) * large_eps - - def _discharge_rule(self, discharge_name, bin_name, large_eps, r, m, t): - """ - Constructs pyomo don't-discharge-while-charging constraints. - For storage units specificially. - @ In, discharge_name, str, name of discharging variable - @ In, bin_name, str, name of forcing binary variable - @ In, large_eps, float, a large-ish number w.r.t. storage activity - @ In, r, int, index of stored resource (is this always 0?) 
- @ In, m, pyo.ConcreteModel, associated model - @ In, t, int, time index for capacity rule - @ Out, rule, bool, inequality used to limit discharge behavior - """ - discharge_var = getattr(m, discharge_name) - bin_var = getattr(m, bin_name) - return discharge_var[r, t] <= bin_var[r, t] * large_eps - - def _level_rule(self, comp, level_name, charge_name, discharge_name, initial_storage, r, m, t): - """ - Constructs pyomo charge-discharge-level balance constraints. - For storage units specificially. - @ In, comp, Component, storage component of interest - @ In, level_name, str, name of level-tracking variable - @ In, charge_name, str, name of charging variable - @ In, discharge_name, str, name of discharging variable - @ In, initial_storage, dict, initial storage levels by component - @ In, r, int, index of stored resource (is this always 0?) - @ In, m, pyo.ConcreteModel, associated model - @ In, t, int, time index for capacity rule - @ Out, rule, bool, inequality used to limit level behavior - """ - level_var = getattr(m, level_name) - charge_var = getattr(m, charge_name) - discharge_var = getattr(m, discharge_name) - if t > 0: - previous = level_var[r, t-1] - dt = m.Times[t] - m.Times[t-1] - else: - previous = initial_storage[comp] - dt = m.Times[1] - m.Times[0] - rte2 = comp.get_sqrt_RTE() # square root of the round-trip efficiency - production = - rte2 * charge_var[r, t] - discharge_var[r, t] / rte2 - return level_var[r, t] == previous + production * dt - - def _periodic_level_rule(self, comp, level_name, initial_storage, r, m, t): - """ - Mandates storage units end with the same level they start with, which prevents - "free energy" or "free sink" due to initial starting levels. - For storage units specificially. - @ In, comp, Component, storage component of interest - @ In, level_name, str, name of level-tracking variable - @ In, initial_storage, dict, initial storage levels by component - @ In, r, int, index of stored resource (is this always 0?) - @ In, m, pyo.ConcreteModel, associated model - @ In, t, int, time index for capacity rule - @ Out, rule, bool, inequality used to limit level behavior - """ - return getattr(m, level_name)[r, m.T[-1]] == initial_storage[comp] - - def _capacity_rule(self, prod_name, r, caps, m, t): - """ - Constructs pyomo capacity constraints. - @ In, prod_name, str, name of production variable - @ In, r, int, index of resource for capacity constraining - @ In, caps, list(float), value to constrain resource at in time - @ In, m, pyo.ConcreteModel, associated model - @ In, t, int, time index for capacity rule - """ - kind = 'lower' if min(caps) < 0 else 'upper' - return self._prod_limit_rule(prod_name, r, caps, kind, t, m) - - def _prod_limit_rule(self, prod_name, r, limits, kind, t, m): - """ - Constructs pyomo production constraints. 
- @ In, prod_name, str, name of production variable - @ In, r, int, index of resource for capacity constraining - @ In, limits, list(float), values in time at which to constrain resource production - @ In, kind, str, either 'upper' or 'lower' for limiting production - @ In, t, int, time index for production rule (NOTE not pyomo index, rather fixed index) - @ In, m, pyo.ConcreteModel, associated model - """ - prod = getattr(m, prod_name) - if kind == 'lower': - # production must exceed value - return prod[r, t] >= limits[t] - elif kind == 'upper': - return prod[r, t] <= limits[t] - else: - raise TypeError('Unrecognized production limit "kind":', kind) - - def _ramp_rule_down(self, prod_name, r, limit, neg_cap, t, m, bins=None): - """ - Constructs pyomo production ramping constraints for reducing production level. - Note that this is number-getting-less-positive for positive-defined capacity, while - it is number-getting-less-negative for negative-defined capacity. - This means that dQ is negative for positive-defined capacity, but positive for vice versa - @ In, prod_name, str, name of production variable - @ In, r, int, index of resource for capacity constraining - @ In, limit, float, limiting change in production level across time steps. NOTE: negative for negative-defined capacity. - @ In, neg_cap, bool, True if capacity is expressed as negative (consumer) - @ In, t, int, time index for ramp limit rule (NOTE not pyomo index, rather fixed index) - @ In, m, pyo.ConcreteModel, associated model - @ In, bins, tuple, optional, (lower, upper, steady) binaries if limiting ramp frequency - @ Out, rule, expression, evaluation for Pyomo constraint - """ - prod = getattr(m, prod_name) - if t == 0: - return pyo.Constraint.Skip - delta = prod[r, t] - prod[r, t-1] - # special treatment if we have frequency-limiting binaries available - if bins is None: - if neg_cap: - # NOTE change in production should be "less positive" than the max - # "negative decrease" in production (decrease is positive when defined by consuming) - return delta <= - limit - else: - # dq is negative, - limit is negative - return delta >= - limit - else: - eps = 1.0 # aux parameter to force binaries to behave, TODO needed? - down = bins[0][t] - up = bins[1][t] - # NOTE we're following the convention that "less negative" is ramping "down" - # for capacity defined by consumption - # e.g. consuming 100 ramps down to consuming 70 is (-100 -> -70), dq = 30 - if neg_cap: - # dq <= limit * dt * Bu + eps * Bd, if limit <= 0 - # dq is positive, - limit is positive - return delta <= - limit * down - eps * up - else: - # dq <= limit * dt * Bu - eps * Bd, if limit >= 0 - # dq is negative, - limit is negative - return delta >= - limit * down + eps * up - - def _ramp_rule_up(self, prod_name, r, limit, neg_cap, t, m, bins=None): - """ - Constructs pyomo production ramping constraints. 
- @ In, prod_name, str, name of production variable - @ In, r, int, index of resource for capacity constraining - @ In, limit, float, limiting change in production level across time steps - @ In, neg_cap, bool, True if capacity is expressed as negative (consumer) - @ In, t, int, time index for ramp limit rule (NOTE not pyomo index, rather fixed index) - @ In, m, pyo.ConcreteModel, associated model - @ In, bins, tuple, optional, (lower, steady, upper) binaries if limiting ramp frequency - """ - prod = getattr(m, prod_name) - if t == 0: - return pyo.Constraint.Skip - delta = prod[r, t] - prod[r, t-1] - if bins is None: - if neg_cap: - # NOTE change in production should be "more positive" than the max - # "negative increase" in production (increase is negative when defined by consuming) - return delta >= limit - else: - # change in production should be less than the max production increase - return delta <= limit - else: - # special treatment if we have frequency-limiting binaries available - eps = 1.0 # aux parameter to force binaries to behave, TODO needed? - down = bins[0][t] - up = bins[1][t] - # NOTE we're following the convention that "more negative" is ramping "up" - # for capacity defined by consumption - # e.g. consuming 100 ramps up to consuming 130 is (-100 -> -130), dq = -30 - if neg_cap: - # dq >= limit * dt * Bu + eps * Bd, if limit <= 0 - return delta >= limit * up + eps * down - else: - # dq <= limit * dt * Bu - eps * Bd, if limit >= 0 - return delta <= limit * up - eps * down - - def _ramp_freq_rule(self, Bd, Bu, tao, t, m): - """ - Constructs pyomo frequency-of-ramp constraints. - @ In, Bd, bool var, binary tracking down-ramp events - @ In, Bu, bool var, binary tracking up-ramp events - @ In, tao, int, number of time steps to look back - @ In, t, int, time step indexer - @ In, m, pyo.ConcreteModel, pyomo model - """ - if t == 0: - return pyo.Constraint.Skip - # looking-back-window shouldn't be longer than existing time - tao = min(t, tao) - # how many ramp-down events in backward window? - tally = sum(1 - Bd[tm] for tm in range(t - tao, t)) - # only allow ramping up if no rampdowns in back window - ## but we can't use if statements, so use binary math - return Bu[t] <= 1 / tao * tally - - def _ramp_freq_bins_rule(self, Bd, Bu, Bn, t, m): - """ - Constructs pyomo constraint for ramping event tracking variables. - This forces choosing between ramping up, ramping down, and steady state operation. - @ In, Bd, bool var, binary tracking down-ramp events - @ In, Bu, bool var, binary tracking up-ramp events - @ In, Bn, bool var, binary tracking no-ramp events - @ In, t, int, time step indexer - @ In, m, pyo.ConcreteModel, pyomo model - """ - return Bd[t] + Bu[t] + Bn[t] == 1 - - def _cashflow_rule(self, meta, m): - """ - Objective function rule. - @ In, meta, dict, additional variable passthrough - @ In, m, pyo.ConcreteModel, associated model - @ Out, total, float, evaluation of cost - """ - activity = m.Activity # dict((comp, getattr(m, f"{comp.name}_production")) for comp in m.Components) - state_args = {'valued': False} - total = self._compute_cashflows(m.Components, activity, m.Times, meta, - state_args=state_args, time_offset=m.time_offset) - return total - - def _conservation_rule(self, initial_storage, meta, res, m, t): - """ - Constructs conservation constraints. 
- @ In, initial_storage, dict, initial storage levels at t==0 (not t+offset==0) - @ In, meta, dict, dictionary of state variables - @ In, res, str, name of resource - @ In, m, pyo.ConcreteModel, associated model - @ In, t, int, index of time variable - @ Out, conservation, bool, balance check - """ - balance = 0 # sum of production rates, which needs to be zero - for comp, res_dict in m.resource_index_map.items(): - if res in res_dict: - # activity information depends on if storage or component - r = res_dict[res] - intr = comp.get_interaction() - if intr.is_type('Storage'): - # Storages have 3 variables: level, charge, and discharge - # -> so calculate activity - charge = getattr(m, f'{comp.name}_charge') - discharge = getattr(m, f'{comp.name}_discharge') - # note that "charge" is negative (as it's consuming) and discharge is positive - # -> so the intuitive |discharge| - |charge| becomes discharge + charge - production = discharge[r, t] + charge[r, t] - else: - var = getattr(m, f'{comp.name}_production') - # TODO move to this? balance += m._activity.get_activity(comp, res, t) - production = var[r, t] - balance += production - return balance == 0 # TODO tol? - - def _min_prod_rule(self, prod_name, r, caps, minimums, m, t): - """ - Constructs minimum production constraint - @ In, prod_name, str, name of production variable - @ In, r, int, index of resource for capacity constraining - @ In, caps, list(float), capacity(t) value for component - @ In, minimums, list(float), minimum allowable production in time - @ In, m, pyo.ConcreteModel, associated model - @ In, t, int, index of time variable - @ Out, minimum, bool, min check - """ - prod = getattr(m, prod_name) - # negative capacity means consuming instead of producing - if max(caps) > 0: - return prod[r, t] >= minimums[t] - else: - return prod[r, t] <= minimums[t] - - def _transfer_rule(self, ratio, r, ref_r, prod_name, m, t): - """ - Constructs transfer function constraints - @ In, ratio, float, ratio for resource to nominal first resource - @ In, r, int, index of transfer resource - @ In, ref_r, int, index of reference resource - @ In, prod_name, str, name of production variable - @ In, m, pyo.ConcreteModel, associated model - @ In, t, int, index of time variable - @ Out, transfer, bool, transfer ratio check - """ - prod = getattr(m, prod_name) - return prod[r, t] == prod[ref_r, t] * ratio # TODO tolerance?? 
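[review note] The transfer helpers deleted here live on as putils.get_transfer_coeffs and PyomoRuleLibrary.transfer_rule. A small worked example of the shared ratio convention; the numbers are illustrative, and the sign convention (consumed resources negative, produced resources positive) is inferred from the surrounding "SIGNS MATTER" comments:

```python
# Worked example of the transfer-ratio convention (illustrative numbers,
# assuming consumed resources carry negative coefficients).
# Reaction form: 1A + 3B -> 2C + 4D, with A as the arbitrary reference resource.
coeffs = {'A': -1.0, 'B': -3.0, 'C': 2.0, 'D': 4.0}

items = iter(coeffs.items())
ref_name, ref_coef = next(items)  # ('A', -1.0), the reference
ratios = {name: coef / ref_coef for name, coef in items}
print(ratios)  # {'B': 3.0, 'C': -2.0, 'D': -4.0}

# transfer_rule then pins each resource to the reference at every time index:
#   prod[r_B, t] == prod[r_A, t] *  3.0
#   prod[r_C, t] == prod[r_A, t] * -2.0
```

So if A's production variable takes the value -10 (consuming 10), the constraints force B = -30, C = 20, and D = 40, matching the 1:3:2:4 stoichiometry.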
- - ### DEBUG - def _debug_pyomo_print(self, m): - """ - Prints the setup pieces of the Pyomo model - @ In, m, pyo.ConcreteModel, model to interrogate - @ Out, None - """ - print('/' + '='*80) - print('DEBUGG model pieces:') - print(' -> objective:') - print(' ', m.obj.pprint()) - print(' -> variables:') - for var in m.component_objects(pyo.Var): - print(' ', var.pprint()) - print(' -> constraints:') - for constr in m.component_objects(pyo.Constraint): - print(' ', constr.pprint()) - print('\\' + '='*80) - print('') - - def _debug_print_soln(self, m): - """ - Prints the solution from the Pyomo model - @ In, m, pyo.ConcreteModel, model to interrogate - @ Out, None - """ - print('*'*80) - print('DEBUGG solution:') - print(' objective value:', m.obj()) - for c, comp in enumerate(m.Components): - name = comp.name - print(' component:', c, name) - for tracker in comp.get_tracking_vars(): - for res, r in m.resource_index_map[comp].items(): - print(f' tracker: {tracker} resource {r}: {res}') - for t, time in enumerate(m.Times): - prod = getattr(m, f'{name}_{tracker}') - if isinstance(prod, pyo.Var): - print(' time:', t + m.time_offset, time, prod[r, t].value) - elif isinstance(prod, pyo.Param): - print(' time:', t + m.time_offset, time, prod[r, t]) - print('*'*80) - - - - - -# DispatchState for Pyomo dispatcher -class PyomoState(DispatchState): - def __init__(self): - """ - Constructor. - @ In, None - @ Out, None - """ - DispatchState.__init__(self) - self._model = None # Pyomo model object - - def initialize(self, components, resources_map, times, model): - """ - Connect information about this State to other objects - @ In, components, list, HERON components - @ In, resources_map, dict, map of component names to resources used - @ In, times, np.array, values of "time" this state represents - @ In, model, pyomo.Model, associated model for this state - @ Out, None - """ - DispatchState.initialize(self, components, resources_map, times) - self._model = model - - def get_activity_indexed(self, comp, activity, r, t, valued=True, **kwargs): - """ - Getter for activity level. 
- @ In, comp, HERON Component, component whose information should be retrieved - @ In, activity, str, tracking variable name for activity subset - @ In, r, int, index of resource to retrieve (as given by meta[HERON][resource_indexer]) - @ In, t, int, index of time at which activity should be provided - @ In, valued, bool, optional, if True then get float value instead of pyomo expression - @ In, kwargs, dict, additional pass-through keyword arguments - @ Out, activity, float, amount of resource "res" produced/consumed by "comp" at time "time"; - note positive is producting, negative is consuming - """ - prod = getattr(self._model, f'{comp.name}_{activity}')[r, t] - if valued: - return prod() - return prod - - def set_activity_indexed(self, comp, r, t, value, valued=False): - raise NotImplementedError From c5d8649f7d04c161c9d222b2354ea10818ca5115 Mon Sep 17 00:00:00 2001 From: Dylan McDowell Date: Mon, 13 Nov 2023 15:54:46 -0700 Subject: [PATCH 02/24] Refactor dispatch method to be more clear --- src/_utils.py | 11 -- src/dispatch/putils.py | 11 ++ src/dispatch/pyomo_dispatch.py | 216 ++++++++++++++++++--------------- 3 files changed, 127 insertions(+), 111 deletions(-) diff --git a/src/_utils.py b/src/_utils.py index 58e2cf13..0d33f573 100644 --- a/src/_utils.py +++ b/src/_utils.py @@ -92,17 +92,6 @@ def get_farm_loc(raven_path=None): # Added by Haoyu Wang, May 25, 2022 farm_loc = plugin_handler.getPluginLocation('FARM') return farm_loc -def get_all_resources(components): - """ - Provides a set of all resources used among all components - @ In, components, list, HERON component objects - @ Out, resources, list, resources used in case - """ - res = set() - for comp in components: - res.update(comp.get_resources()) - return res - def get_project_lifetime(case, components): """ obtains the project lifetime diff --git a/src/dispatch/putils.py b/src/dispatch/putils.py index 04d69a99..ed31ea43 100644 --- a/src/dispatch/putils.py +++ b/src/dispatch/putils.py @@ -3,6 +3,17 @@ import numpy as np import pyomo.environ as pyo +def get_all_resources(components): + """ + Provides a set of all resources used among all components + @ In, components, list, HERON component objects + @ Out, resources, list, resources used in case + """ + res = set() + for comp in components: + res.update(comp.get_resources()) + return res + def get_prod_bounds(m, comp, meta): """ Determines the production limits of the given component diff --git a/src/dispatch/pyomo_dispatch.py b/src/dispatch/pyomo_dispatch.py index db409ba7..b4dc7af5 100644 --- a/src/dispatch/pyomo_dispatch.py +++ b/src/dispatch/pyomo_dispatch.py @@ -19,7 +19,7 @@ from . import PyomoRuleLibrary as prl from . import putils from .Dispatcher import Dispatcher -from .DispatchState import DispatchState, NumpyState, PyomoState +from .DispatchState import NumpyState, PyomoState from .. 
import _utils as hutils @@ -40,17 +40,22 @@ 'glpk': 'mipgap', } +class DispatchError(Exception): + """Custom exception for dispatch errors.""" + pass + class Pyomo(Dispatcher): """ Dispatches using rolling windows in Pyomo """ - naming_template = {'comp prod': '{comp}|{res}|prod', - 'comp transfer': '{comp}|{res}|trans', - 'comp max': '{comp}|{res}|max', - 'comp ramp up': '{comp}|{res}|rampup', - 'comp ramp down': '{comp}|{res}|rampdown', - 'conservation': '{res}|consv', - } + naming_template = { + 'comp prod': '{comp}|{res}|prod', + 'comp transfer': '{comp}|{res}|trans', + 'comp max': '{comp}|{res}|max', + 'comp ramp up': '{comp}|{res}|rampup', + 'comp ramp down': '{comp}|{res}|rampdown', + 'conservation': '{res}|consv', + } ### INITIALIZATION @classmethod @@ -178,64 +183,116 @@ def dispatch(self, case, components, sources, meta): @ Out, disp, DispatchScenario, resulting dispatch """ t_start, t_end, t_num = self.get_time_discr() - time = np.linspace(t_start, t_end, t_num) # Note we don't care about segment/cluster here - resources = sorted(list(hutils.get_all_resources(components))) # list of all active resources - # pre-build results structure - ## we can use NumpyState here so we don't need to worry about a Pyomo model object - dispatch = NumpyState()# dict((comp.name, dict((res, np.zeros(len(time))) for res in comp.get_resources())) for comp in components) + time = np.linspace(t_start, t_end, t_num) + resources = sorted(putils.get_all_resources(components)) + dispatch = NumpyState() dispatch.initialize(components, meta['HERON']['resource_indexer'], time) - # rolling window + start_index = 0 final_index = len(time) - # TODO window overlap! ( )[ ] -> ( [ ) ] + initial_levels = self._get_initial_storage_levels(components, meta, 0) + while start_index < final_index: - end_index = start_index + self._window_len - if end_index > final_index: - end_index = final_index - if end_index - start_index == 1: - # TODO custom error raise for catching in DispatchManager? - raise IOError("A rolling window of length 1 was requested, but this causes crashes in pyomo. 
" + - "Change the length of the rolling window to avoid length 1 histories.") - specific_time = time[start_index:end_index] - print(f'DEBUGG starting window {start_index} to {end_index}') - start = time_mod.time() - # set initial storage levels - initial_levels = {} - for comp in components: + end_index = min(start_index + self._window_len, final_index) + self._validate_window_length(start_index, end_index) + + specific_time = time[start_index:end_index] + subdisp, solve_time = self._solve_dispatch_window(specific_time, start_index, case, components, sources, resources, initial_levels, meta) + + self._store_results_in_dispatch(dispatch, subdisp, components, start_index, end_index) + start_index = end_index + + return dispatch + + @staticmethod + def _get_initial_storage_levels(components, meta, start_index): + """ + """ + initial_levels = {} + for comp in components: if comp.get_interaction().is_type('Storage'): - if start_index == 0: - initial_levels[comp] = comp.get_interaction().get_initial_level(meta) - else: - initial_levels[comp] = subdisp[comp.name]['level'][comp.get_interaction().get_resource()][-1] - # allow for converging solution iteratively - converged = False - conv_counter = 0 - previous = None - while not converged: + if start_index == 0: + initial_levels[comp] = comp.get_interaction().get_initial_level(meta) + + return initial_levels + + @staticmethod + def _validate_window_length(start_index, end_index): + """ + """ + if end_index - start_index == 1: + raise DispatchError("Window length of 1 detected, which is not supported.") + + def _solve_dispatch_window(self, specific_time, start_index, case, components, sources, resources, initial_levels, meta): + """ + """ + start = time_mod.time() + subdisp = self.dispatch_window(specific_time, start_index, case, components, sources, resources, initial_levels, meta) + end = time_mod.time() + solve_time = end - start + + conv_counter = 0 + converged = not self.needs_convergence(components) + previous = None + + while not converged and conv_counter < self._picard_limit: conv_counter += 1 - if conv_counter > self._picard_limit: - break - # dispatch - subdisp = self.dispatch_window(specific_time, start_index, - case, components, sources, resources, - initial_levels, meta) - # do we need a convergence criteria? Check now. 
- if self.needs_convergence(components): - print(f'DEBUGG iteratively solving window, iteration {conv_counter}/{self._picard_limit} ...') - converged = self.check_converged(subdisp, previous, components) - previous = subdisp - else: - converged = True + subdisp = self.dispatch_window(specific_time, start_index, case, components, sources, resources, initial_levels, meta) + converged = self.check_converged(subdisp, previous, components) + previous = subdisp + + if conv_counter >= self._picard_limit and not converged: + raise DispatchError(f"Convergence not reached after {self._picard_limit} iterations.") + + return subdisp, solve_time - end = time_mod.time() - print(f'DEBUGG solve time: {end-start} s') - # store result in corresponding part of dispatch - for comp in components: + def _store_results_in_dispatch(self, dispatch, subdisp, components, start_index, end_index): + """ + """ + for comp in components: for tag in comp.get_tracking_vars(): - for res, values in subdisp[comp.name][tag].items(): - dispatch.set_activity_vector(comp, res, values, tracker=tag, start_idx=start_index, end_idx=end_index) - start_index = end_index - return dispatch + for res, values in subdisp[comp.name][tag].items(): + dispatch.set_activity_vector(comp, res, values, tracker=tag, start_idx=start_index, end_idx=end_index) + + def check_converged(self, new, old, components): + """ + Checks convergence of consecutive dispatch solves + @ In, new, dict, results of dispatch # TODO should this be the model rather than dict? + @ In, old, dict, results of previous dispatch + @ In, components, list, HERON component list + @ Out, converged, bool, True if convergence is met + """ + tol = 1e-4 # TODO user option + if old is None: + return False + converged = True + for comp in components: + intr = comp.get_interaction() + name = comp.name + tracker = comp.get_tracking_vars()[0] + if intr.is_governed(): # by "is_governed" we mean "isn't optimized in pyomo" + # check activity L2 norm as a differ + # TODO this may be specific to storage right now + res = intr.get_resource() + scale = np.max(old[name][tracker][res]) + diff = np.linalg.norm(new[name][tracker][res] - old[name][tracker][res]) / (scale if scale != 0 else 1) + if diff > tol: + converged = False + return converged + + def needs_convergence(self, components): + """ + Determines whether the current setup needs convergence to solve. + @ In, components, list, HERON component list + @ Out, needs_convergence, bool, True if iteration is needed + """ + for comp in components: + intr = comp.get_interaction() + # storages with a prescribed strategy MAY need iteration + if intr.is_governed(): + return True + # if we get here, no iteration is needed + return False def get_solver(self): """ @@ -389,48 +446,7 @@ def _solve_dispatch(self, m, meta): # return dict of numpy arrays result = putils.retrieve_solution(m) return result - - def check_converged(self, new, old, components): - """ - Checks convergence of consecutive dispatch solves - @ In, new, dict, results of dispatch # TODO should this be the model rather than dict? 
- @ In, old, dict, results of previous dispatch - @ In, components, list, HERON component list - @ Out, converged, bool, True if convergence is met - """ - tol = 1e-4 # TODO user option - if old is None: - return False - converged = True - for comp in components: - intr = comp.get_interaction() - name = comp.name - tracker = comp.get_tracking_vars()[0] - if intr.is_governed(): # by "is_governed" we mean "isn't optimized in pyomo" - # check activity L2 norm as a differ - # TODO this may be specific to storage right now - res = intr.get_resource() - scale = np.max(old[name][tracker][res]) - diff = np.linalg.norm(new[name][tracker][res] - old[name][tracker][res]) / (scale if scale != 0 else 1) - if diff > tol: - converged = False - return converged - - def needs_convergence(self, components): - """ - Determines whether the current setup needs convergence to solve. - @ In, components, list, HERON component list - @ Out, needs_convergence, bool, True if iteration is needed - """ - for comp in components: - intr = comp.get_interaction() - # storages with a prescribed strategy MAY need iteration - if intr.is_governed(): - return True - # if we get here, no iteration is needed - return False - - + ### PYOMO Element Constructors def _create_production_limit(self, m, validation): """ From 5089fecdd86eaa7ca0b09dc33adeff2248283f6b Mon Sep 17 00:00:00 2001 From: Dylan McDowell Date: Tue, 14 Nov 2023 16:06:04 -0700 Subject: [PATCH 03/24] More refactoring of pyomo_dispatch --- src/dispatch/pyomo_dispatch.py | 205 ++++++++++++++++++++------------- 1 file changed, 123 insertions(+), 82 deletions(-) diff --git a/src/dispatch/pyomo_dispatch.py b/src/dispatch/pyomo_dispatch.py index b4dc7af5..003e9016 100644 --- a/src/dispatch/pyomo_dispatch.py +++ b/src/dispatch/pyomo_dispatch.py @@ -1,4 +1,3 @@ - # Copyright 2020, Battelle Energy Alliance, LLC # ALL RIGHTS RESERVED """ @@ -9,22 +8,20 @@ import pprint import numpy as np import pyutilib.subprocess.GlobalData -from itertools import compress import pyomo.environ as pyo from pyomo.opt import SolverStatus, TerminationCondition from pyomo.common.errors import ApplicationError from ravenframework.utils import InputData, InputTypes -from . import PyomoRuleLibrary as prl from . import putils +from . import PyomoRuleLibrary as prl from .Dispatcher import Dispatcher from .DispatchState import NumpyState, PyomoState -from .. import _utils as hutils - # allows pyomo to solve on threaded processes pyutilib.subprocess.GlobalData.DEFINE_SIGNAL_HANDLERS_DEFAULT = False + # Choose solver; CBC is a great choice unless we're on Windows if platform.system() == 'Windows': SOLVERS = ['glpk', 'cbc', 'ipopt'] @@ -34,14 +31,16 @@ # different solvers express "tolerance" for converging solution in different # ways. Further, they mean different things for different solvers. This map # just tracks the "nominal" argument that we should pass through pyomo. -solver_tol_map = { +SOLVER_TOL_MAP = { 'ipopt': 'tol', 'cbc': 'primalTolerance', 'glpk': 'mipgap', } class DispatchError(Exception): - """Custom exception for dispatch errors.""" + """ + Custom exception for dispatch errors. + """ pass class Pyomo(Dispatcher): @@ -89,6 +88,7 @@ def get_input_specs(cls): # TODO specific for pyomo dispatcher return specs + def __init__(self): """ Constructor. @@ -103,6 +103,7 @@ def __init__(self): self._solver = None # overwrite option for solver self._picard_limit = 10 # iterative solve limit + def read_input(self, specs): """ Read in input specifications. 
@@ -114,7 +115,7 @@ def read_input(self, specs): window_len_node = specs.findFirst('rolling_window_length') if window_len_node is not None: self._window_len = window_len_node.value - + debug_node = specs.findFirst('debug_mode') if debug_node is not None: self.debug_mode = debug_node.value @@ -123,54 +124,52 @@ def read_input(self, specs): if solver_node is not None: self._solver = solver_node.value - # the tolerance value needs to be saved for after the solver is set - # since we don't know what key to assign it to otherwise tol_node = specs.findFirst('tol') if tol_node is not None: solver_tol = tol_node.value else: solver_tol = None - if self._solver is None: - solvers_to_check = SOLVERS - else: - solvers_to_check = [self._solver] - # check solver exists - for solver in solvers_to_check: - self._solver = solver - found_solver = True - try: - if not pyo.SolverFactory(self._solver).available(): - found_solver = False - else: - break - except ApplicationError: - found_solver = False - # NOTE: we probably need a consistent way to test and check viable solvers, - # maybe through a unit test that mimics the model setup here. For now, I assume - # that anything that shows as not available or starts with an underscore is not - # viable, and it will crash if the solver can't solve our kinds of models. - # This should only come up if the user is specifically requesting a solver, though, - # the default glpk and cbc are tested. - if not found_solver: - all_options = pyo.SolverFactory._cls.keys() # TODO shorten to list of tested options? - solver_filter = [] - for op in all_options: - if op.startswith('_'): # These don't seem like legitimate options, based on testing - solver_filter.append(False) - continue - try: - solver_filter.append(pyo.SolverFactory(op).available()) - except (ApplicationError, NameError, ImportError): - solver_filter.append(False) - available = list(compress(all_options, solver_filter)) - msg = f'Requested solver "{self._solver}" was not found for pyomo dispatcher!' - msg += f' Options MAY include: {available}' - raise RuntimeError(msg) + self._solver = self._check_solver_availability(self._solver) if solver_tol is not None: - key = solver_tol_map[self._solver] - self.solve_options[key] = solver_tol + key = SOLVER_TOL_MAP.get(self._solver, None) + if key is not None: + self.solve_options[key] = solver_tol + else: + raise ValueError(f"Tolerance setting not available for solver '{self._solver}'.") + + + def _check_solver_availability(self, requested_solver): + """ + """ + solvers_to_check = SOLVERS if requested_solver is None else [requested_solver] + for solver in solvers_to_check: + if self._is_solver_available(solver): + return solver + + available_sovers = self._get_available_solvers() + raise RuntimeError( + f'Requested solver "{requested_solver} not found. Available options may include: {available_sovers}.' + ) + + + def _get_available_solvers(self): + """ + """ + all_options = pyo.SolverFactory._cls.keys() # TODO shorten to list of tested options? 
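+    # Quick illustrative sketch (not part of the dispatcher): the same checks can
+    # be probed interactively, e.g.
+    #   >>> import pyomo.environ as pyo
+    #   >>> pyo.SolverFactory('cbc').available()  # True only if CBC is actually installed
+    # Note that pyo.SolverFactory._cls.keys() lists registered solver names, not
+    # necessarily solvers that are installed and usable on this system.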
+ available = [op for op in all_options if not op.startswith('_') and self._is_solver_available(op)] + return available + + + def _is_solver_available(self, solver): + """ + """ + try: + return pyo.SolverFactory(solver).available() + except (ApplicationError, NameError, ImportError): + return False + ### API def dispatch(self, case, components, sources, meta): @@ -190,95 +189,123 @@ def dispatch(self, case, components, sources, meta): start_index = 0 final_index = len(time) - initial_levels = self._get_initial_storage_levels(components, meta, 0) + initial_levels = self._get_initial_storage_levels(components, meta, start_index) while start_index < final_index: end_index = min(start_index + self._window_len, final_index) self._validate_window_length(start_index, end_index) specific_time = time[start_index:end_index] - subdisp, solve_time = self._solve_dispatch_window(specific_time, start_index, case, components, sources, resources, initial_levels, meta) + subdisp, solve_time = self._handle_dispatch_window_solve( + specific_time, start_index, case, components, sources, resources, initial_levels, meta + ) + print(f'DEBUGG solve time: {solve_time} s') self._store_results_in_dispatch(dispatch, subdisp, components, start_index, end_index) start_index = end_index return dispatch - + + @staticmethod def _get_initial_storage_levels(components, meta, start_index): """ + """ initial_levels = {} for comp in components: if comp.get_interaction().is_type('Storage'): if start_index == 0: initial_levels[comp] = comp.get_interaction().get_initial_level(meta) - + # NOTE: There used to be an else conditional here that depended on the + # variable `subdisp` which was not defined yet. Leaving an unreachable + # branch of code, thus, I removed it. So currently, this function assumes + # start_index will always be zero, otherwise it will return an empty dict. 
+ # Here was the line in case we need it in the future: + # else: initial_levels[comp] = subdisp[comp.name]['level'][comp.get_interaction().get_resource()][-1] return initial_levels - + + @staticmethod def _validate_window_length(start_index, end_index): """ + """ if end_index - start_index == 1: raise DispatchError("Window length of 1 detected, which is not supported.") - def _solve_dispatch_window(self, specific_time, start_index, case, components, sources, resources, initial_levels, meta): + + def _handle_dispatch_window_solve(self, specific_time, start_index, case, components, sources, resources, initial_levels, meta): """ + """ start = time_mod.time() subdisp = self.dispatch_window(specific_time, start_index, case, components, sources, resources, initial_levels, meta) - end = time_mod.time() - solve_time = end - start - - conv_counter = 0 - converged = not self.needs_convergence(components) - previous = None - while not converged and conv_counter < self._picard_limit: + if self.needs_convergence(components): + conv_counter = 0 + converged = False + previous = None + + while not converged and conv_counter < self._picard_limit: conv_counter += 1 + print(f'DEBUGG iteratively solving window, iteration {conv_counter}/{self._picard_limit} ...') subdisp = self.dispatch_window(specific_time, start_index, case, components, sources, resources, initial_levels, meta) converged = self.check_converged(subdisp, previous, components) previous = subdisp - if conv_counter >= self._picard_limit and not converged: + if conv_counter >= self._picard_limit and not converged: raise DispatchError(f"Convergence not reached after {self._picard_limit} iterations.") - + + else: + # No convergence process needed + pass + + end = time_mod.time() + solve_time = end - start return subdisp, solve_time + def _store_results_in_dispatch(self, dispatch, subdisp, components, start_index, end_index): """ + """ for comp in components: for tag in comp.get_tracking_vars(): for res, values in subdisp[comp.name][tag].items(): dispatch.set_activity_vector(comp, res, values, tracker=tag, start_idx=start_index, end_idx=end_index) - def check_converged(self, new, old, components): + + def check_converged(self, new, old, components, tol=1e-4): """ Checks convergence of consecutive dispatch solves @ In, new, dict, results of dispatch # TODO should this be the model rather than dict? 
@ In, old, dict, results of previous dispatch @ In, components, list, HERON component list + @ In, tol, float, optional, tolerance for convergence @ Out, converged, bool, True if convergence is met """ - tol = 1e-4 # TODO user option if old is None: return False - converged = True + for comp in components: intr = comp.get_interaction() - name = comp.name - tracker = comp.get_tracking_vars()[0] if intr.is_governed(): # by "is_governed" we mean "isn't optimized in pyomo" # check activity L2 norm as a differ # TODO this may be specific to storage right now + name = comp.name + tracker = comp.get_tracking_vars()[0] res = intr.get_resource() scale = np.max(old[name][tracker][res]) - diff = np.linalg.norm(new[name][tracker][res] - old[name][tracker][res]) / (scale if scale != 0 else 1) + # Avoid division by zero + if scale == 0: + diff = np.linalg.norm(new[name][tracker][res] - old[name][tracker][res]) + else: + diff = np.linalg.norm(new[name][tracker][res] - old[name][tracker][res]) / scale if diff > tol: - converged = False - return converged + return False + return True + def needs_convergence(self, components): """ @@ -286,13 +313,8 @@ def needs_convergence(self, components): @ In, components, list, HERON component list @ Out, needs_convergence, bool, True if iteration is needed """ - for comp in components: - intr = comp.get_interaction() - # storages with a prescribed strategy MAY need iteration - if intr.is_governed(): - return True - # if we get here, no iteration is needed - return False + return any(comp.get_interaction().is_governed() for comp in components) + def get_solver(self): """ @@ -302,6 +324,7 @@ def get_solver(self): """ return self._solver + ### INTERNAL @staticmethod def _calculate_deltas(activity, initial_level): @@ -312,6 +335,7 @@ def _calculate_deltas(activity, initial_level): deltas[0] = activity[0] - initial_level return deltas + def _process_storage_component(self, m, comp, intr, meta): """ """ @@ -325,6 +349,7 @@ def _process_storage_component(self, m, comp, intr, meta): self._create_production_param(m, comp, charge, tag="charge") self._create_production_param(m, comp, discharge, tag="discharge") + def _process_components(self, m, comp, time, initial_storage, meta): """ """ @@ -336,6 +361,7 @@ def _process_components(self, m, comp, time, initial_storage, meta): else: self._create_production(m, comp, meta) + def _process_governed_component(self, m, comp, time, intr, meta): """ """ @@ -345,7 +371,8 @@ def _process_governed_component(self, m, comp, time, intr, meta): else: activity = intr.get_strategy().evaluate(meta)[0]['level'] self._create_production_param(m, comp, activity) - + + def _build_pyomo_model(self, time, time_offset, case, components, resources, meta): """ """ @@ -370,7 +397,8 @@ def _build_pyomo_model(self, time, time_offset, case, components, resources, met m.Activity = PyomoState() m.Activity.initialize(m.Components, m.resource_index_map, m.Times, m) return m - + + def _populate_pyomo_model(self, model, components, initial_storage, time, resources, meta): """ """ @@ -380,6 +408,7 @@ def _populate_pyomo_model(self, model, components, initial_storage, time, resour self._create_conservation(model, resources, initial_storage, meta) # conservation of resources (e.g. production == consumption) self._create_objective(meta, model) # objective + def dispatch_window(self, time, time_offset, case, components, sources, resources, initial_storage, meta): """ Dispatches one part of a rolling window. 
@@ -397,7 +426,8 @@ def dispatch_window(self, time, time_offset, case, components, sources, resource self._populate_pyomo_model(model, components, initial_storage, time, resources, meta) result = self._solve_dispatch(model, meta) return result - + + def _solve_dispatch(self, m, meta): """ """ @@ -446,7 +476,8 @@ def _solve_dispatch(self, m, meta): # return dict of numpy arrays result = putils.retrieve_solution(m) return result - + + ### PYOMO Element Constructors def _create_production_limit(self, m, validation): """ @@ -477,6 +508,7 @@ def _create_production_limit(self, m, validation): setattr(m, name, constr) print(f'DEBUGG added validation constraint "{name}"') + def _create_production_param(self, m, comp, values, tag=None): """ Creates production pyomo fixed parameter object for a component @@ -498,6 +530,7 @@ def _create_production_param(self, m, comp, values, tag=None): setattr(m, prod_name, prod) return prod_name + def _create_production(self, m, comp, meta): """ Creates all pyomo variable objects for a non-storage component @@ -521,6 +554,7 @@ def _create_production(self, m, comp, meta): self._create_ramp_limit(m, comp, prod_name, meta) return prod_name + def _create_production_variable(self, m, comp, meta, tag=None, add_bounds=True, **kwargs): """ Creates production pyomo variable object for a component @@ -570,6 +604,7 @@ def _create_production_variable(self, m, comp, meta, tag=None, add_bounds=True, setattr(m, prod_name, prod) return prod_name + def _create_ramp_limit(self, m, comp, prod_name, meta): """ Creates ramping limitations for a producing component @@ -623,6 +658,7 @@ def _create_ramp_limit(self, m, comp, prod_name, meta): constr = pyo.Constraint(m.T, rule=freq_rule) setattr(m, f'{comp.name}_ramp_freq_constr', constr) + def _create_capacity_constraints(self, m, comp, prod_name, meta): """ Creates pyomo capacity constraints @@ -654,6 +690,7 @@ def _create_capacity_constraints(self, m, comp, prod_name, meta): var.set_values(values) setattr(m, f'{comp.name}_{cap_res}_minprod_constr', constr) + def _find_production_limits(self, m, comp, meta): """ Determines the capacity limits of a unit's operation, in time. 
@@ -682,6 +719,7 @@ def _find_production_limits(self, m, comp, meta):
       mins.append(minimum)
     return caps, mins
 
+
   def _create_transfer(self, m, comp, prod_name):
     """
     Creates pyomo transfer function constraints
@@ -706,6 +744,7 @@ def _create_transfer(self, m, comp, prod_name):
       constr = pyo.Constraint(m.T, rule=rule)
       setattr(m, rule_name, constr)
 
+
   def _create_storage(self, m, comp, initial_storage, meta):
     """
     Creates storage pyomo variable objects for a storage component
@@ -758,6 +797,7 @@ def _create_storage(self, m, comp, initial_storage, meta):
      rule = lambda mod, t: prl.discharge_rule(discharge_name, bin_name, large_eps, r, mod, t)
      setattr(m, discharge_rule_name, pyo.Constraint(m.T, rule=rule))
 
+
   def _create_conservation(self, m, resources, initial_storage, meta):
     """
     Creates pyomo conservation constraints
@@ -772,6 +812,7 @@ def _create_conservation(self, m, resources, initial_storage, meta):
       constr = pyo.Constraint(m.T, rule=rule)
       setattr(m, f'{resource}_conservation', constr)
 
+
   def _create_objective(self, meta, m):
     """
     Creates pyomo objective function

From e75c3035e02278a9351af133d1cdda059799f51d Mon Sep 17 00:00:00 2001
From: Dylan McDowell
Date: Wed, 15 Nov 2023 13:30:06 -0700
Subject: [PATCH 04/24] Testing out PyomoModelHandler

---
 src/dispatch/PyomoModelHandler.py | 423 ++++++++++++++
 src/dispatch/pyomo_dispatch.py    | 887 ++++++++++++++++--------------
 2 files changed, 884 insertions(+), 426 deletions(-)
 create mode 100644 src/dispatch/PyomoModelHandler.py

diff --git a/src/dispatch/PyomoModelHandler.py b/src/dispatch/PyomoModelHandler.py
new file mode 100644
index 00000000..741ce9d9
--- /dev/null
+++ b/src/dispatch/PyomoModelHandler.py
@@ -0,0 +1,423 @@
+
+import numpy as np
+import pyomo.environ as pyo
+
+from . import PyomoRuleLibrary as prl
+from . import putils
+from .DispatchState import PyomoState
+
+class PyomoModelHandler:
+  """
+  Builds and owns the pyomo model for a single dispatch window.
+  """
+
+  def __init__(self, time, time_offset, case, components, resources, initial_storage, meta) -> None:
+    """
+    Constructor.
+    @ In, time, np.array, values of "time" for this dispatch window
+    @ In, time_offset, int, offset of this window's time indices within the full history
+    @ In, case, HERON Case, case that this dispatch is part of
+    @ In, components, list, HERON components available to the dispatch
+    @ In, resources, list, sorted list of all resources in problem
+    @ In, initial_storage, dict, initial storage levels
+    @ In, meta, dict, additional variables passed through
+    @ Out, None
+    """
+    self.time = time
+    self.time_offset = time_offset
+    self.case = case
+    self.components = components
+    # self.sources = sources
+    self.resources = resources
+    self.initial_storage = initial_storage
+    self.meta = meta
+    self.model = pyo.ConcreteModel()
+
+  def build_model(self):
+    """
+    Sets up the sets, indexers, and bookkeeping properties of the pyomo model.
+    @ In, None
+    @ Out, None
+    """
+    C = np.arange(0, len(self.components), dtype=int) # indexes components
+    R = np.arange(0, len(self.resources), dtype=int) # indexes resources
+    T = np.arange(0, len(self.time), dtype=int) # indexes time
+    self.model.C = pyo.Set(initialize=C)
+    self.model.R = pyo.Set(initialize=R)
+    self.model.T = pyo.Set(initialize=T)
+    self.model.Times = self.time
+    self.model.time_offset = self.time_offset
+    self.model.resource_index_map = self.meta['HERON']['resource_indexer'] # maps the resource to its index WITHIN APPLICABLE components (sparse matrix) e.g. component: {resource: local index}, ... etc}
+    # properties
+    self.model.Case = self.case
+    self.model.Components = self.components
+    self.model.Activity = PyomoState()
+    self.model.Activity.initialize(self.model.Components, self.model.resource_index_map, self.model.Times, self.model)
+
+  def populate_model(self):
+    """
+    Creates the variables, constraints, and objective for the pyomo model.
+    @ In, None
+    @ Out, None
+    """
+    for comp in self.components:
+      self._process_component(comp)
+    self._create_conservation(self.model, self.resources) # conservation of resources (e.g. production == consumption)
+    self._create_objective(self.meta, self.model) # objective function
+
+  def _process_component(self, component):
+    """
+    Determines what kind of component this is and builds the matching pyomo objects.
+    @ In, component, HERON Component, component to process
+    @ Out, None
+    """
+    interaction = component.get_interaction()
+    if interaction.is_governed():
+      self._process_governed_component(component, interaction)
+    elif interaction.is_type("Storage"):
+      self._create_storage(self.model, component, self.initial_storage, self.meta)
+    else:
+      self._create_production(self.model, component, self.meta)
+
+  def _process_governed_component(self, component, interaction):
+    """
+    Processes a component whose activity is prescribed ("governed") rather than optimized.
+    @ In, component, HERON Component, component to process
+    @ In, interaction, HERON Interaction, interaction belonging to the component
+    @ Out, None
+    """
+    self.meta["request"] = {"component": component, "time": self.time}
+    if interaction.is_type("Storage"):
+      self._process_storage_component(self.model, component, interaction)
+    else:
+      activity = interaction.get_strategy().evaluate(self.meta)[0]['level']
+      self._create_production_param(self.model, component, activity)
+
+  def _process_storage_component(self, m, component, interaction):
+    """
+    Processes a governed storage component, fixing its level, charge, and discharge.
+    @ In, m, pyo.ConcreteModel, associated model
+    @ In, component, HERON Component, component to process
+    @ In, interaction, HERON Interaction, interaction belonging to the component
+    @ Out, None
+    """
+    activity = interaction.get_strategy().evaluate(self.meta)[0]["level"]
+    self._create_production_param(m, component, activity, tag="level")
+    dt = self.model.Times[1] - self.model.Times[0]
+    rte2 = component.get_sqrt_RTE()
+    deltas = self._calculate_deltas(activity, interaction.get_initial_level(self.meta))
+    charge = np.where(deltas > 0, -deltas / dt / rte2, 0)
+    discharge = np.where(deltas < 0, -deltas / dt * rte2, 0)
+    self._create_production_param(m, component, charge, tag="charge")
+    self._create_production_param(m, component, discharge, tag="discharge")
+
+  @staticmethod
+  def _calculate_deltas(activity, initial_level):
+    """
+    Computes the change in level at each time index; mirrors the dispatcher helper of the same name.
+    @ In, activity, np.array, level values in time
+    @ In, initial_level, float, level preceding the first entry
+    @ Out, deltas, np.array, deltas of the activity
+    """
+    deltas = np.zeros(len(activity))
+    deltas[1:] = activity[1:] - activity[:-1]
+    deltas[0] = activity[0] - initial_level
+    return deltas
+
+  ### PYOMO Element Constructors
+  def _create_production_limit(self, m, validation):
+    """
+    Creates pyomo production constraint given validation errors
+    @ In, m, pyo.ConcreteModel, associated model
+    @ In, validation, dict, information from Validator about limit violation
+    @ Out, None
+    """
+    # TODO could validator write a symbolic expression on request? That'd be sweet.
+    comp = validation['component']
+    resource = validation['resource']
+    r = m.resource_index_map[comp][resource]
+    t = validation['time_index']
+    limit = validation['limit']
+    limits = np.zeros(len(m.Times))
+    limits[t] = limit
+    limit_type = validation['limit_type']
+    prod_name = f'{comp.name}_production'
+    rule = lambda mod: prl.prod_limit_rule(prod_name, r, limits, limit_type, t, mod)
+    constr = pyo.Constraint(rule=rule)
+    counter = 1
+    name_template = f'{comp.name}_{resource}_{t}_vld_limit_constr_{{i}}'
+    # make sure we get a unique name for this constraint
+    name = name_template.format(i=counter)
+    while getattr(m, name, None) is not None:
+      counter += 1
+      name = name_template.format(i=counter)
+    setattr(m, name, constr)
+    print(f'DEBUGG added validation constraint "{name}"')
+
+
+  def _create_production_param(self, m, comp, values, tag=None):
+    """
+    Creates production pyomo fixed parameter object for a component
+    @ In, m, pyo.ConcreteModel, associated model
+    @ In, comp, HERON Component, component to make production variables for
+    @ In, values, np.array(float), values to set for param
+    @ In, tag, str, optional, if not None then name will be component_[tag]
+    @ Out, prod_name, str, name of production variable
+    """
+    name = comp.name
+    if tag is None:
+      tag = 'production'
+    # create pyomo indexer for this component's resources
+    res_indexer = pyo.Set(initialize=range(len(m.resource_index_map[comp])))
+    setattr(m, f'{name}_res_index_map', res_indexer)
+    prod_name = f'{name}_{tag}'
+    init = (((0, t), values[t]) for t in m.T)
+    prod = pyo.Param(res_indexer, m.T, initialize=dict(init))
+    setattr(m, prod_name, prod)
+    return prod_name
+
+
+  def _create_production(self, m, comp, meta):
+    """
+    Creates all pyomo variable objects for a non-storage component
@ In, m, pyo.ConcreteModel, associated model
+    @ In, comp, HERON Component, component to make production variables for
+    @ In, meta, dict, dictionary of state variables
+    @ Out, None
+    """
+    prod_name = self._create_production_variable(m, comp, meta)
+    ## if you cannot set limits directly in the production variable, set separate constraint:
+    ## Method 1: set variable bounds directly --> TODO more work needed, but would be nice
+    # lower, upper = self._get_prod_bounds(m, comp)
+    # limits should be None unless specified, so use "getters" from dictionaries
+    # bounds = lambda m, r, t: (lower.get(r, None), upper.get(r, None))
+    ## Method 2: set capacity constraints explicitly --> TODO more work needed, but would be nice
+    # self._create_capacity(m, comp, prod_name, meta)    # capacity constraints
+    # transfer function governs input -> output relationship
+    self._create_transfer(m, comp, prod_name)
+    # ramp rates
+    if comp.ramp_limit is not None:
+      self._create_ramp_limit(m, comp, prod_name, meta)
+    return prod_name
+
+
+  def _create_production_variable(self, m, comp, meta, tag=None, add_bounds=True, **kwargs):
+    """
+    Creates production pyomo variable object for a component
+    @ In, m, pyo.ConcreteModel, associated model
+    @ In, comp, HERON Component, component to make production variables for
+    @ In, meta, dict, dictionary of state variables
+    @ In, tag, str, optional, if not None then name will be component_[tag]; otherwise "production"
+    @ In, add_bounds, bool, optional, if True then determine and set bounds for variable
+    @ In, kwargs, dict, optional, pass-along kwargs to pyomo variable
+    @ Out, prod_name, str, name of production variable
+    """
+    if tag is None:
+      tag = 'production'
+    name = comp.name
+    cap_res = comp.get_capacity_var() # name of resource that defines capacity
+    limit_r = m.resource_index_map[comp][cap_res] # production index of the governing resource
+    # create pyomo indexer for this component's resources
+    indexer_name = f'{name}_res_index_map'
+    indexer = getattr(m, indexer_name, None)
+    if indexer is None:
+      indexer = pyo.Set(initialize=range(len(m.resource_index_map[comp])))
+      setattr(m, indexer_name, indexer)
+    prod_name = f'{name}_{tag}'
+    caps, mins = self._find_production_limits(m, comp, meta)
+    if min(caps) < 0:
+      # quick check that capacities signs are consistent #FIXME: revisit, this is an assumption
+      assert max(caps) <= 0, \
+        'Capacities are inconsistent: mix of positive and negative values not currently supported.'
+      # we have a unit that's consuming, so we need to flip the variables to be sensible
+      mins, caps = caps, mins
+      inits = caps
+    else:
+      inits = mins
+    if add_bounds:
+      # create bounds based on min, max operation
+      bounds = lambda m, r, t: (mins[t] if r == limit_r else None, caps[t] if r == limit_r else None)
+      initial = lambda m, r, t: inits[t] if r == limit_r else 0
+    else:
+      bounds = (None, None)
+      initial = 0
+    # production variable depends on resources, time
+    #FIXME initials! Should be lambda with mins for tracking var!
+    prod = pyo.Var(indexer, m.T, initialize=initial, bounds=bounds, **kwargs)
+    # TODO it may be that we need to set variable values to avoid problems in some solvers.
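+    # Worked example of the closures above (illustrative numbers, not HERON data):
+    # with caps=[10.0, 12.0], mins=[0.0, 0.0], and limit_r == 0,
+    #   bounds(m, 0, 1) -> (0.0, 12.0)   # the capacity-defining resource is bounded
+    #   bounds(m, 1, 1) -> (None, None)  # other resources are left to the transfer constraints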
+ # if comp.is_dispatchable() == 'fixed': + # for t, _ in enumerate(m.Times): + # prod[limit_r, t].fix(caps[t]) + setattr(m, prod_name, prod) + return prod_name + + + def _create_ramp_limit(self, m, comp, prod_name, meta): + """ + Creates ramping limitations for a producing component + @ In, m, pyo.ConcreteModel, associated model + @ In, comp, HERON Component, component to make ramping limits for + @ In, prod_name, str, name of production variable + @ In, meta, dict, dictionary of state variables + @ Out, None + """ + # ramping is defined in terms of the capacity variable + cap_res = comp.get_capacity_var() # name of resource that defines capacity + cap = comp.get_capacity(meta)[0][cap_res] + r = m.resource_index_map[comp][cap_res] # production index of the governing resource + # NOTE: this includes the built capacity * capacity factor, if any, which assumes + # the ramp rate depends on the available capacity, not the built capacity. + limit_delta = comp.ramp_limit * cap # NOTE: if cap is negative, then this is negative. + if limit_delta < 0: + neg_cap = True + else: + neg_cap = False + # if we're limiting ramp frequency, make vars and rules for that + if comp.ramp_freq: + # create binaries for tracking ramping + up = pyo.Var(m.T, initialize=0, domain=pyo.Binary) + down = pyo.Var(m.T, initialize=0, domain=pyo.Binary) + steady = pyo.Var(m.T, initialize=1, domain=pyo.Binary) + setattr(m, f'{comp.name}_up_ramp_tracker', up) + setattr(m, f'{comp.name}_down_ramp_tracker', down) + setattr(m, f'{comp.name}_steady_ramp_tracker', steady) + ramp_trackers = (down, up, steady) + else: + ramp_trackers = None + # limit production changes when ramping down + ramp_rule_down = lambda mod, t: prl.ramp_rule_down(prod_name, r, limit_delta, neg_cap, t, mod, bins=ramp_trackers) + constr = pyo.Constraint(m.T, rule=ramp_rule_down) + setattr(m, f'{comp.name}_ramp_down_constr', constr) + # limit production changes when ramping up + ramp_rule_up = lambda mod, t: prl.ramp_rule_up(prod_name, r, limit_delta, neg_cap, t, mod, bins=ramp_trackers) + constr = pyo.Constraint(m.T, rule=ramp_rule_up) + setattr(m, f'{comp.name}_ramp_up_constr', constr) + # if ramping frequency limit, impose binary constraints + if comp.ramp_freq: + # binaries rule, for exclusive choice up/down/steady + binaries_rule = lambda mod, t: prl.ramp_freq_bins_rule(down, up, steady, t, mod) + constr = pyo.Constraint(m.T, rule=binaries_rule) + setattr(m, f'{comp.name}_ramp_freq_binaries', constr) + # limit frequency of ramping + # TODO calculate "tao" window using ramp freq and dt + # -> for now, just use the integer for number of windows + freq_rule = lambda mod, t: prl.ramp_freq_rule(down, up, comp.ramp_freq, t, m) + constr = pyo.Constraint(m.T, rule=freq_rule) + setattr(m, f'{comp.name}_ramp_freq_constr', constr) + + + def _create_capacity_constraints(self, m, comp, prod_name, meta): + """ + Creates pyomo capacity constraints + @ In, m, pyo.ConcreteModel, associated model + @ In, comp, HERON Component, component to make variables for + @ In, prod_name, str, name of production variable + @ In, meta, dict, additional state information + @ Out, None + """ + cap_res = comp.get_capacity_var() # name of resource that defines capacity + r = m.resource_index_map[comp][cap_res] # production index of the governing resource + caps, mins = self._find_production_limits(m, comp, meta) + # capacity + max_rule = lambda mod, t: prl.capacity_rule(prod_name, r, caps, mod, t) + constr = pyo.Constraint(m.T, rule=max_rule) + setattr(m, 
f'{comp.name}_{cap_res}_capacity_constr', constr) + # minimum + min_rule = lambda mod, t: prl.min_prod_rule(prod_name, r, caps, mins, mod, t) + constr = pyo.Constraint(m.T, rule=min_rule) + # set initial conditions + for t, time in enumerate(m.Times): + cap = caps[t] + if cap == mins[t]: + # initialize values so there's no boundary errors + var = getattr(m, prod_name) + values = var.get_values() + for k in values: + values[k] = cap + var.set_values(values) + setattr(m, f'{comp.name}_{cap_res}_minprod_constr', constr) + + + def _find_production_limits(self, m, comp, meta): + """ + Determines the capacity limits of a unit's operation, in time. + @ In, m, pyo.ConcreteModel, associated model + @ In, comp, HERON Component, component to make variables for + @ In, meta, dict, additional state information + @ Out, caps, array, max production values by time + @ Out, mins, array, min production values by time + """ + cap_res = comp.get_capacity_var() # name of resource that defines capacity + r = m.resource_index_map[comp][cap_res] # production index of the governing resource + # production is always lower than capacity + ## NOTE get_capacity returns (data, meta) and data is dict + ## TODO does this work with, e.g., ARMA-based capacities? + ### -> "time" is stored on "m" and could be used to correctly evaluate the capacity + caps = [] + mins = [] + for t, time in enumerate(m.Times): + meta['HERON']['time_index'] = t + m.time_offset + cap = comp.get_capacity(meta)[0][cap_res] # value of capacity limit (units of governing resource) + caps.append(cap) + if (comp.is_dispatchable() == 'fixed'): + minimum = cap + else: + minimum = comp.get_minimum(meta)[0][cap_res] + mins.append(minimum) + return caps, mins + + + def _create_transfer(self, m, comp, prod_name): + """ + Creates pyomo transfer function constraints + @ In, m, pyo.ConcreteModel, associated model + @ In, comp, HERON Component, component to make variables for + @ In, prod_name, str, name of production variable + @ Out, None + """ + name = comp.name + # transfer functions + # e.g. 2A + 3B -> 1C + 2E + # get linear coefficients + # TODO this could also take a transfer function from an external Python function assuming + # we're careful about how the expression-vs-float gets used + # and figure out how to handle multiple ins, multiple outs + ratios = putils.get_transfer_coeffs(m, comp) + ref_r, ref_name, _ = ratios.pop('__reference', (None, None, None)) + for resource, ratio in ratios.items(): + r = m.resource_index_map[comp][resource] + rule_name = f'{name}_{resource}_{ref_name}_transfer' + rule = lambda mod, t: prl.transfer_rule(ratio, r, ref_r, prod_name, mod, t) + constr = pyo.Constraint(m.T, rule=rule) + setattr(m, rule_name, constr) + + + def _create_storage(self, m, comp, initial_storage, meta): + """ + Creates storage pyomo variable objects for a storage component + Similar to create_production, but for storages + @ In, m, pyo.ConcreteModel, associated model + @ In, comp, HERON Component, component to make production variables for + @ In, initial_storage, dict, initial storage levels + @ In, meta, dict, additional state information + @ Out, level_name, str, name of storage level variable + """ + prefix = comp.name + # what resource index? Isn't it always 0? 
# assumption + r = 0 # NOTE this is only true if each storage ONLY uses 1 resource + # storages require a few variables: + # (1) a level tracker, + level_name = self._create_production_variable(m, comp, meta, tag='level') + # -> set operational limits + # self._create_capacity(m, comp, level_name, meta) + # (2, 3) separate charge/discharge trackers, so we can implement round-trip efficiency and ramp rates + charge_name = self._create_production_variable(m, comp, meta, tag='charge', add_bounds=False, within=pyo.NonPositiveReals) + discharge_name = self._create_production_variable(m, comp, meta, tag='discharge', add_bounds=False, within=pyo.NonNegativeReals) + # balance level, charge/discharge + level_rule_name = prefix + '_level_constr' + rule = lambda mod, t: prl.level_rule(comp, level_name, charge_name, discharge_name, initial_storage, r, mod, t) + setattr(m, level_rule_name, pyo.Constraint(m.T, rule=rule)) + # periodic boundary condition for storage level + if comp.get_interaction().apply_periodic_level: + periodic_rule_name = prefix + '_level_periodic_constr' + rule = lambda mod, t: prl.periodic_level_rule(comp, level_name, initial_storage, r, mod, t) + setattr(m, periodic_rule_name, pyo.Constraint(m.T, rule=rule)) + + # (4) a binary variable to track whether we're charging or discharging, to prevent BOTH happening + # -> 0 is charging, 1 is discharging + # -> TODO make this a user-based option to disable, if they want to allow dual operation + # -> -> but they should really think about if that's what they want! + # FIXME currently introducing the bigM strategy also makes solves numerically unstable, + # and frequently results in spurious errors. For now, disable it. + allow_both = True # allow simultaneous charging and discharging + if not allow_both: + bin_name = self._create_production_variable(m, comp, meta, tag='dcforcer', add_bounds=False, within=pyo.Binary) + # we need a large epsilon, but not so large that addition stops making sense + # -> we don't know what any values for this component will be! How do we choose? + # -> NOTE that choosing this value has VAST impact on solve stability!! 
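+      # Numeric illustration of the big-M pair built below (hypothetical values):
+      # with large_eps = 1e8 and the binary fixed at 1 (discharging), the charge rule
+      # reduces to -charge[r, t] <= 0, forcing charge to zero; at 0 (charging) it
+      # relaxes to -charge[r, t] <= 1e8, effectively unconstrained. The discharge
+      # rule mirrors this with the roles reversed.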
+ large_eps = 1e8 #0.01 * sys.float_info.max + # charging constraint: don't charge while discharging (note the sign matters) + charge_rule_name = prefix + '_charge_constr' + rule = lambda mod, t: prl.charge_rule(charge_name, bin_name, large_eps, r, mod, t) + setattr(m, charge_rule_name, pyo.Constraint(m.T, rule=rule)) + discharge_rule_name = prefix + '_discharge_constr' + rule = lambda mod, t: prl.discharge_rule(discharge_name, bin_name, large_eps, r, mod, t) + setattr(m, discharge_rule_name, pyo.Constraint(m.T, rule=rule)) + + + def _create_conservation(self, m, resources): + """ + Creates pyomo conservation constraints + @ In, m, pyo.ConcreteModel, associated model + @ In, resources, list, list of resources in problem + @ In, initial_storage, dict, initial storage levels + @ In, meta, dict, dictionary of state variables + @ Out, None + """ + for resource in resources: + rule = lambda mod, t: prl.conservation_rule(resource, mod, t) + constr = pyo.Constraint(m.T, rule=rule) + setattr(m, f'{resource}_conservation', constr) + + + def _create_objective(self, meta, m): + """ + Creates pyomo objective function + @ In, meta, dict, additional variables to pass through + @ In, m, pyo.ConcreteModel, associated model + @ Out, None + """ + # cashflow eval + rule = lambda mod: prl.cashflow_rule(self._compute_cashflows, meta, mod) + m.obj = pyo.Objective(rule=rule, sense=pyo.maximize) + + + + + \ No newline at end of file diff --git a/src/dispatch/pyomo_dispatch.py b/src/dispatch/pyomo_dispatch.py index 003e9016..44d7de51 100644 --- a/src/dispatch/pyomo_dispatch.py +++ b/src/dispatch/pyomo_dispatch.py @@ -16,6 +16,7 @@ from . import putils from . import PyomoRuleLibrary as prl +from .PyomoModelHandler import PyomoModelHandler from .Dispatcher import Dispatcher from .DispatchState import NumpyState, PyomoState @@ -64,27 +65,50 @@ def get_input_specs(cls): @ In, None @ Out, specs, InputData, specs """ - specs = InputData.parameterInputFactory('pyomo', ordered=False, baseNode=None, - descr=r"""The \texttt{pyomo} dispatcher uses analytic modeling and rolling windows to - solve dispatch optimization with perfect information via the pyomo optimization library.""") - specs.addSub(InputData.parameterInputFactory('rolling_window_length', contentType=InputTypes.IntegerType, - descr=r"""Sets the length of the rolling window that the Pyomo optimization algorithm - uses to break down histories. Longer window lengths will minimize boundary effects, such as - nonoptimal storage dispatch, at the cost of slower optimization solves. - Note that if the rolling window results in a window of length 1 (such as at the end of a history), - this can cause problems for pyomo. - \default{24}""")) - specs.addSub(InputData.parameterInputFactory('debug_mode', contentType=InputTypes.BoolType, - descr=r"""Enables additional printing in the pyomo dispatcher. Highly discouraged for production runs. - \default{False}.""")) - specs.addSub(InputData.parameterInputFactory('solver', contentType=InputTypes.StringType, - descr=r"""Indicates which solver should be used by pyomo. Options depend on individual installation. - \default{'glpk' for Windows, 'cbc' otherwise}.""")) - specs.addSub(InputData.parameterInputFactory('tol', contentType=InputTypes.FloatType, - descr=r"""Relative tolerance for converging final optimal dispatch solutions. Specific implementation - depends on the solver selected. Changing this value could have significant impacts on the - dispatch optimization time and quality. 
-        \default{solver dependent, often 1e-6}."""))
+    specs = InputData.parameterInputFactory(
+      'pyomo', ordered=False, baseNode=None,
+      descr=r"""The \texttt{pyomo} dispatcher uses analytic modeling and rolling
+      windows to solve dispatch optimization with perfect information via the
+      pyomo optimization library."""
+    )
+
+    specs.addSub(
+      InputData.parameterInputFactory(
+        'rolling_window_length', contentType=InputTypes.IntegerType,
+        descr=r"""Sets the length of the rolling window that the Pyomo optimization
+        algorithm uses to break down histories. Longer window lengths will minimize
+        boundary effects, such as nonoptimal storage dispatch, at the cost of slower
+        optimization solves. Note that if the rolling window results in a window
+        of length 1 (such as at the end of a history), this can cause problems for pyomo.
+        \default{24}"""
+      )
+    )
+
+    specs.addSub(
+      InputData.parameterInputFactory(
+        'debug_mode', contentType=InputTypes.BoolType,
+        descr=r"""Enables additional printing in the pyomo dispatcher.
+        Highly discouraged for production runs. \default{False}."""
+      )
+    )
+
+    specs.addSub(
+      InputData.parameterInputFactory(
+        'solver', contentType=InputTypes.StringType,
+        descr=r"""Indicates which solver should be used by pyomo. Options depend
+        on individual installation. \default{'glpk' for Windows, 'cbc' otherwise}."""
+      )
+    )
+
+    specs.addSub(
+      InputData.parameterInputFactory(
+        'tol', contentType=InputTypes.FloatType,
+        descr=r"""Relative tolerance for converging final optimal dispatch solutions.
+        Specific implementation depends on the solver selected. Changing this value
+        could have significant impacts on the dispatch optimization time and quality.
+        \default{solver dependent, often 1e-6}."""
+      )
+    )
     # TODO specific for pyomo dispatcher
     return specs
 
+
   def __init__(self):
     """
     Constructor.
@@ -104,7 +128,7 @@ def __init__(self):
     self._picard_limit = 10 # iterative solve limit
 
 
-  def read_input(self, specs):
+  def read_input(self, specs) -> None:
     """
     Read in input specifications.
     @ In, specs, RAVEN InputData, specifications
@@ -140,30 +164,29 @@ def read_input(self, specs):
       raise ValueError(f"Tolerance setting not available for solver '{self._solver}'.")
 
 
-  def _check_solver_availability(self, requested_solver):
+  def _check_solver_availability(self, requested_solver: str) -> str:
     """
+    Check if any of the requested solvers are available. If not, display available options.
+    @ In, requested_solver, str, requested solver (e.g. 'cbc', 'glpk', 'ipopt')
+    @ Out, solver, str, name of solver that is available to use.
     """
     solvers_to_check = SOLVERS if requested_solver is None else [requested_solver]
     for solver in solvers_to_check:
      if self._is_solver_available(solver):
        return solver
 
-    available_sovers = self._get_available_solvers()
+    all_options = pyo.SolverFactory._cls.keys()
+    available_solvers = [op for op in all_options if not op.startswith('_') and self._is_solver_available(op)]
    raise RuntimeError(
-      f'Requested solver "{requested_solver} not found. Available options may include: {available_sovers}.'
+      f'Requested solver "{requested_solver}" not found. Available options may include: {available_solvers}.'
    )
 
 
-  def _get_available_solvers(self):
-    """
-    """
-    all_options = pyo.SolverFactory._cls.keys() # TODO shorten to list of tested options?
-    available = [op for op in all_options if not op.startswith('_') and self._is_solver_available(op)]
-    return available
-
-
-  def _is_solver_available(self, solver):
+  def _is_solver_available(self, solver: str) -> bool:
     """
+    Check if specified solver is available on the system.
+ @ In, solver, str, name of solver to check. + @ Out, is_available, bool, True if solver is available. """ try: return pyo.SolverFactory(solver).available() @@ -193,9 +216,11 @@ def dispatch(self, case, components, sources, meta): while start_index < final_index: end_index = min(start_index + self._window_len, final_index) - self._validate_window_length(start_index, end_index) + if end_index - start_index == 1: + raise DispatchError("Window length of 1 detected, which is not supported.") specific_time = time[start_index:end_index] + print(f"Start: {start_index} End: {end_index}") subdisp, solve_time = self._handle_dispatch_window_solve( specific_time, start_index, case, components, sources, resources, initial_levels, meta ) @@ -208,36 +233,40 @@ def dispatch(self, case, components, sources, meta): @staticmethod - def _get_initial_storage_levels(components, meta, start_index): + def _get_initial_storage_levels(components: list, meta: dict, start_index: int) -> dict: """ - + Return initial storage levels for 'Storage' component types. + @ In, components, list, HERON components available to the dispatch. + @ In, meta, dict, additional variables passed through. + @ In, start_index, int, index of the start of the window. + @ Out, initial_levels, dict, initial storage levels for 'Storage' component types. """ initial_levels = {} for comp in components: - if comp.get_interaction().is_type('Storage'): - if start_index == 0: - initial_levels[comp] = comp.get_interaction().get_initial_level(meta) - # NOTE: There used to be an else conditional here that depended on the - # variable `subdisp` which was not defined yet. Leaving an unreachable - # branch of code, thus, I removed it. So currently, this function assumes - # start_index will always be zero, otherwise it will return an empty dict. - # Here was the line in case we need it in the future: - # else: initial_levels[comp] = subdisp[comp.name]['level'][comp.get_interaction().get_resource()][-1] + if comp.get_interaction().is_type('Storage'): + if start_index == 0: + initial_levels[comp] = comp.get_interaction().get_initial_level(meta) + # NOTE: There used to be an else conditional here that depended on the + # variable `subdisp` which was not defined yet. Leaving an unreachable + # branch of code, thus, I removed it. So currently, this function assumes + # start_index will always be zero, otherwise it will return an empty dict. + # Here was the line in case we need it in the future: + # else: initial_levels[comp] = subdisp[comp.name]['level'][comp.get_interaction().get_resource()][-1] return initial_levels - @staticmethod - def _validate_window_length(start_index, end_index): - """ - - """ - if end_index - start_index == 1: - raise DispatchError("Window length of 1 detected, which is not supported.") - - def _handle_dispatch_window_solve(self, specific_time, start_index, case, components, sources, resources, initial_levels, meta): """ - + Set up convergence criteria and collect results from a dispatch window solve. + @ In, specific_time, np.array, value of time to evaluate. + @ In, start_index, int, index of the start of the window. + @ In, case, HERON Case, Case that this dispatch is part of. + @ In, components, list, HERON components available to the dispatch. + @ In, sources, list, HERON source (placeholders) for signals. + @ In, resources, list, sorted list of all resources in problem. + @ In, initial_levels, dict, initial storage levels if any. + @ In, meta, dict, additional variables passed through. 
+ @ Out, subdisp, dict, results of window dispatch. """ start = time_mod.time() subdisp = self.dispatch_window(specific_time, start_index, case, components, sources, resources, initial_levels, meta) @@ -266,14 +295,22 @@ def _handle_dispatch_window_solve(self, specific_time, start_index, case, compon return subdisp, solve_time - def _store_results_in_dispatch(self, dispatch, subdisp, components, start_index, end_index): + def _store_results_in_dispatch(self, dispatch, subdisp, components, start_index, end_index) -> None: """ - + Store results from a dispatch window in the overall dispatch container. + @ In, dispatch, DispatchScenario, resulting dispatch. + @ In, subdisp, dict, results of window dispatch. + @ In, components, list, HERON components available to the dispatch. + @ In, start_index, int, index of the start of the window. + @ In, end_index, int, index of the end of the window. + @ Out, None """ for comp in components: - for tag in comp.get_tracking_vars(): - for res, values in subdisp[comp.name][tag].items(): - dispatch.set_activity_vector(comp, res, values, tracker=tag, start_idx=start_index, end_idx=end_index) + for tag in comp.get_tracking_vars(): + for res, values in subdisp[comp.name][tag].items(): + dispatch.set_activity_vector( + comp, res, values, tracker=tag, start_idx=start_index, end_idx=end_index + ) def check_converged(self, new, old, components, tol=1e-4): @@ -378,25 +415,21 @@ def _build_pyomo_model(self, time, time_offset, case, components, resources, met """ # build the Pyomo model # TODO abstract this model as much as possible BEFORE, then concrete initialization per window - m = pyo.ConcreteModel() + model = pyo.ConcreteModel() # indices - C = np.arange(0, len(components), dtype=int) # indexes component - R = np.arange(0, len(resources), dtype=int) # indexes resources - # T = np.arange(start_index, end_index, dtype=int) # indexes resources - T = np.arange(0, len(time), dtype=int) # indexes resources - m.C = pyo.Set(initialize=C) - m.R = pyo.Set(initialize=R) - m.T = pyo.Set(initialize=T) - m.Times = time - m.time_offset = time_offset - m.resource_index_map = meta['HERON']['resource_indexer'] # maps the resource to its index WITHIN APPLICABLE components (sparse matrix) + model.C = pyo.Set(initialize=np.arange(0, len(components), dtype=int)) + model.R = pyo.Set(initialize=np.arange(0, len(resources), dtype=int)) + model.T = pyo.Set(initialize=np.arange(0, len(time), dtype=int)) + model.Times = time + model.time_offset = time_offset + model.resource_index_map = meta['HERON']['resource_indexer'] # maps the resource to its index WITHIN APPLICABLE components (sparse matrix) # e.g. component: {resource: local index}, ... etc} # properties - m.Case = case - m.Components = components - m.Activity = PyomoState() - m.Activity.initialize(m.Components, m.resource_index_map, m.Times, m) - return m + model.Case = case + model.Components = components + model.Activity = PyomoState() + model.Activity.initialize(model.Components, model.resource_index_map, model.Times, model) + return model def _populate_pyomo_model(self, model, components, initial_storage, time, resources, meta): @@ -405,7 +438,7 @@ def _populate_pyomo_model(self, model, components, initial_storage, time, resour # constraints and variables for comp in components: self._process_components(model, comp, time, initial_storage, meta) - self._create_conservation(model, resources, initial_storage, meta) # conservation of resources (e.g. 
production == consumption) + self._create_conservation(model, resources) # conservation of resources (e.g. production == consumption) self._create_objective(meta, model) # objective @@ -422,9 +455,11 @@ def dispatch_window(self, time, time_offset, case, components, sources, resource @ In, meta, dict, additional variables passed through @ Out, result, dict, results of window dispatch """ - model = self._build_pyomo_model(time, time_offset, case, components, resources, meta) - self._populate_pyomo_model(model, components, initial_storage, time, resources, meta) - result = self._solve_dispatch(model, meta) + model = PyomoModelHandler(time, time_offset, case, components, resources, initial_storage, meta) + # model = self._build_pyomo_model(time, time_offset, case, components, resources, meta) + model.populate_model() + # self._populate_pyomo_model(model, components, initial_storage, time, resources, meta) + result = self._solve_dispatch(model.model, meta) return result @@ -478,348 +513,348 @@ def _solve_dispatch(self, m, meta): return result - ### PYOMO Element Constructors - def _create_production_limit(self, m, validation): - """ - Creates pyomo production constraint given validation errors - @ In, m, pyo.ConcreteModel, associated model - @ In, validation, dict, information from Validator about limit violation - @ Out, None - """ - # TODO could validator write a symbolic expression on request? That'd be sweet. - comp = validation['component'] - resource = validation['resource'] - r = m.resource_index_map[comp][resource] - t = validation['time_index'] - limit = validation['limit'] - limits = np.zeros(len(m.Times)) - limits[t] = limit - limit_type = validation['limit_type'] - prod_name = f'{comp.name}_production' - rule = lambda mod: prl.prod_limit_rule(prod_name, r, limits, limit_type, t, mod) - constr = pyo.Constraint(rule=rule) - counter = 1 - name_template = f'{comp.name}_{resource}_{t}_vld_limit_constr_{{i}}' - # make sure we get a unique name for this constraint - name = name_template.format(i=counter) - while getattr(m, name, None) is not None: - counter += 1 - name = name_template.format(i=counter) - setattr(m, name, constr) - print(f'DEBUGG added validation constraint "{name}"') - - - def _create_production_param(self, m, comp, values, tag=None): - """ - Creates production pyomo fixed parameter object for a component - @ In, m, pyo.ConcreteModel, associated model - @ In, comp, HERON Component, component to make production variables for - @ In, values, np.array(float), values to set for param - @ In, tag, str, optional, if not None then name will be component_[tag] - @ Out, prod_name, str, name of production variable - """ - name = comp.name - if tag is None: - tag = 'production' - # create pyomo indexer for this component's resources - res_indexer = pyo.Set(initialize=range(len(m.resource_index_map[comp]))) - setattr(m, f'{name}_res_index_map', res_indexer) - prod_name = f'{name}_{tag}' - init = (((0, t), values[t]) for t in m.T) - prod = pyo.Param(res_indexer, m.T, initialize=dict(init)) - setattr(m, prod_name, prod) - return prod_name - - - def _create_production(self, m, comp, meta): - """ - Creates all pyomo variable objects for a non-storage component - @ In, m, pyo.ConcreteModel, associated model - @ In, comp, HERON Component, component to make production variables for - @ In, meta, dict, dictionary of state variables - @ Out, None - """ - prod_name = self._create_production_variable(m, comp, meta) - ## if you cannot set limits directly in the production variable, set separate 
contraint: - ## Method 1: set variable bounds directly --> TODO more work needed, but would be nice - # lower, upper = self._get_prod_bounds(m, comp) - # limits should be None unless specified, so use "getters" from dictionaries - # bounds = lambda m, r, t: (lower.get(r, None), upper.get(r, None)) - ## Method 2: set variable bounds directly --> TODO more work needed, but would be nice - # self._create_capacity(m, comp, prod_name, meta) # capacity constraints - # transfer function governs input -> output relationship - self._create_transfer(m, comp, prod_name) - # ramp rates - if comp.ramp_limit is not None: - self._create_ramp_limit(m, comp, prod_name, meta) - return prod_name - - - def _create_production_variable(self, m, comp, meta, tag=None, add_bounds=True, **kwargs): - """ - Creates production pyomo variable object for a component - @ In, m, pyo.ConcreteModel, associated model - @ In, comp, HERON Component, component to make production variables for - @ In, tag, str, optional, if not None then name will be component_[tag]; otherwise "production" - @ In, add_bounds, bool, optional, if True then determine and set bounds for variable - @ In, kwargs, dict, optional, passalong kwargs to pyomo variable - @ Out, prod_name, str, name of production variable - """ - if tag is None: - tag = 'production' - name = comp.name - cap_res = comp.get_capacity_var() # name of resource that defines capacity - limit_r = m.resource_index_map[comp][cap_res] # production index of the governing resource - # create pyomo indexer for this component's resources - indexer_name = f'{name}_res_index_map' - indexer = getattr(m, indexer_name, None) - if indexer is None: - indexer = pyo.Set(initialize=range(len(m.resource_index_map[comp]))) - setattr(m, indexer_name, indexer) - prod_name = f'{name}_{tag}' - caps, mins = self._find_production_limits(m, comp, meta) - if min(caps) < 0: - # quick check that capacities signs are consistent #FIXME: revisit, this is an assumption - assert max(caps) <= 0, \ - 'Capacities are inconsistent: mix of positive and negative values not currently supported.' - # we have a unit that's consuming, so we need to flip the variables to be sensible - mins, caps = caps, mins - inits = caps - else: - inits = mins - if add_bounds: - # create bounds based in min, max operation - bounds = lambda m, r, t: (mins[t] if r == limit_r else None, caps[t] if r == limit_r else None) - initial = lambda m, r, t: inits[t] if r == limit_r else 0 - else: - bounds = (None, None) - initial = 0 - # production variable depends on resources, time - #FIXME initials! Should be lambda with mins for tracking var! - prod = pyo.Var(indexer, m.T, initialize=initial, bounds=bounds, **kwargs) - # TODO it may be that we need to set variable values to avoid problems in some solvers. 
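# --- Illustrative sketch (not part of this patch): the per-time bounds and
# initialize rules above, in isolation. Values here are hypothetical; assumes
# pyomo is installed. The rules receive (model, *indices), and only the
# capacity-governing resource index is bounded, mirroring the pattern above.
import pyomo.environ as pyo

caps = [10.0, 10.0, 8.0]  # hypothetical per-timestep maximum production
mins = [0.0, 0.0, 0.0]    # hypothetical per-timestep minimum production
limit_r = 0               # index of the capacity-governing resource

demo = pyo.ConcreteModel()
demo.R = pyo.Set(initialize=range(2))
demo.T = pyo.Set(initialize=range(3))
demo.prod = pyo.Var(
    demo.R, demo.T,
    initialize=lambda m, r, t: mins[t] if r == limit_r else 0,
    bounds=lambda m, r, t: (mins[t], caps[t]) if r == limit_r else (None, None),
)
# --- end sketch ---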
- # if comp.is_dispatchable() == 'fixed': - # for t, _ in enumerate(m.Times): - # prod[limit_r, t].fix(caps[t]) - setattr(m, prod_name, prod) - return prod_name - - - def _create_ramp_limit(self, m, comp, prod_name, meta): - """ - Creates ramping limitations for a producing component - @ In, m, pyo.ConcreteModel, associated model - @ In, comp, HERON Component, component to make ramping limits for - @ In, prod_name, str, name of production variable - @ In, meta, dict, dictionary of state variables - @ Out, None - """ - # ramping is defined in terms of the capacity variable - cap_res = comp.get_capacity_var() # name of resource that defines capacity - cap = comp.get_capacity(meta)[0][cap_res] - r = m.resource_index_map[comp][cap_res] # production index of the governing resource - # NOTE: this includes the built capacity * capacity factor, if any, which assumes - # the ramp rate depends on the available capacity, not the built capacity. - limit_delta = comp.ramp_limit * cap # NOTE: if cap is negative, then this is negative. - if limit_delta < 0: - neg_cap = True - else: - neg_cap = False - # if we're limiting ramp frequency, make vars and rules for that - if comp.ramp_freq: - # create binaries for tracking ramping - up = pyo.Var(m.T, initialize=0, domain=pyo.Binary) - down = pyo.Var(m.T, initialize=0, domain=pyo.Binary) - steady = pyo.Var(m.T, initialize=1, domain=pyo.Binary) - setattr(m, f'{comp.name}_up_ramp_tracker', up) - setattr(m, f'{comp.name}_down_ramp_tracker', down) - setattr(m, f'{comp.name}_steady_ramp_tracker', steady) - ramp_trackers = (down, up, steady) - else: - ramp_trackers = None - # limit production changes when ramping down - ramp_rule_down = lambda mod, t: prl.ramp_rule_down(prod_name, r, limit_delta, neg_cap, t, mod, bins=ramp_trackers) - constr = pyo.Constraint(m.T, rule=ramp_rule_down) - setattr(m, f'{comp.name}_ramp_down_constr', constr) - # limit production changes when ramping up - ramp_rule_up = lambda mod, t: prl.ramp_rule_up(prod_name, r, limit_delta, neg_cap, t, mod, bins=ramp_trackers) - constr = pyo.Constraint(m.T, rule=ramp_rule_up) - setattr(m, f'{comp.name}_ramp_up_constr', constr) - # if ramping frequency limit, impose binary constraints - if comp.ramp_freq: - # binaries rule, for exclusive choice up/down/steady - binaries_rule = lambda mod, t: prl.ramp_freq_bins_rule(down, up, steady, t, mod) - constr = pyo.Constraint(m.T, rule=binaries_rule) - setattr(m, f'{comp.name}_ramp_freq_binaries', constr) - # limit frequency of ramping - # TODO calculate "tao" window using ramp freq and dt - # -> for now, just use the integer for number of windows - freq_rule = lambda mod, t: prl.ramp_freq_rule(down, up, comp.ramp_freq, t, m) - constr = pyo.Constraint(m.T, rule=freq_rule) - setattr(m, f'{comp.name}_ramp_freq_constr', constr) - - - def _create_capacity_constraints(self, m, comp, prod_name, meta): - """ - Creates pyomo capacity constraints - @ In, m, pyo.ConcreteModel, associated model - @ In, comp, HERON Component, component to make variables for - @ In, prod_name, str, name of production variable - @ In, meta, dict, additional state information - @ Out, None - """ - cap_res = comp.get_capacity_var() # name of resource that defines capacity - r = m.resource_index_map[comp][cap_res] # production index of the governing resource - caps, mins = self._find_production_limits(m, comp, meta) - # capacity - max_rule = lambda mod, t: prl.capacity_rule(prod_name, r, caps, mod, t) - constr = pyo.Constraint(m.T, rule=max_rule) - setattr(m, 
f'{comp.name}_{cap_res}_capacity_constr', constr) - # minimum - min_rule = lambda mod, t: prl.min_prod_rule(prod_name, r, caps, mins, mod, t) - constr = pyo.Constraint(m.T, rule=min_rule) - # set initial conditions - for t, time in enumerate(m.Times): - cap = caps[t] - if cap == mins[t]: - # initialize values so there's no boundary errors - var = getattr(m, prod_name) - values = var.get_values() - for k in values: - values[k] = cap - var.set_values(values) - setattr(m, f'{comp.name}_{cap_res}_minprod_constr', constr) - - - def _find_production_limits(self, m, comp, meta): - """ - Determines the capacity limits of a unit's operation, in time. - @ In, m, pyo.ConcreteModel, associated model - @ In, comp, HERON Component, component to make variables for - @ In, meta, dict, additional state information - @ Out, caps, array, max production values by time - @ Out, mins, array, min production values by time - """ - cap_res = comp.get_capacity_var() # name of resource that defines capacity - r = m.resource_index_map[comp][cap_res] # production index of the governing resource - # production is always lower than capacity - ## NOTE get_capacity returns (data, meta) and data is dict - ## TODO does this work with, e.g., ARMA-based capacities? - ### -> "time" is stored on "m" and could be used to correctly evaluate the capacity - caps = [] - mins = [] - for t, time in enumerate(m.Times): - meta['HERON']['time_index'] = t + m.time_offset - cap = comp.get_capacity(meta)[0][cap_res] # value of capacity limit (units of governing resource) - caps.append(cap) - if (comp.is_dispatchable() == 'fixed'): - minimum = cap - else: - minimum = comp.get_minimum(meta)[0][cap_res] - mins.append(minimum) - return caps, mins - - - def _create_transfer(self, m, comp, prod_name): - """ - Creates pyomo transfer function constraints - @ In, m, pyo.ConcreteModel, associated model - @ In, comp, HERON Component, component to make variables for - @ In, prod_name, str, name of production variable - @ Out, None - """ - name = comp.name - # transfer functions - # e.g. 2A + 3B -> 1C + 2E - # get linear coefficients - # TODO this could also take a transfer function from an external Python function assuming - # we're careful about how the expression-vs-float gets used - # and figure out how to handle multiple ins, multiple outs - ratios = putils.get_transfer_coeffs(m, comp) - ref_r, ref_name, _ = ratios.pop('__reference', (None, None, None)) - for resource, ratio in ratios.items(): - r = m.resource_index_map[comp][resource] - rule_name = f'{name}_{resource}_{ref_name}_transfer' - rule = lambda mod, t: prl.transfer_rule(ratio, r, ref_r, prod_name, mod, t) - constr = pyo.Constraint(m.T, rule=rule) - setattr(m, rule_name, constr) - - - def _create_storage(self, m, comp, initial_storage, meta): - """ - Creates storage pyomo variable objects for a storage component - Similar to create_production, but for storages - @ In, m, pyo.ConcreteModel, associated model - @ In, comp, HERON Component, component to make production variables for - @ In, initial_storage, dict, initial storage levels - @ In, meta, dict, additional state information - @ Out, level_name, str, name of storage level variable - """ - prefix = comp.name - # what resource index? Isn't it always 0? 
# assumption - r = 0 # NOTE this is only true if each storage ONLY uses 1 resource - # storages require a few variables: - # (1) a level tracker, - level_name = self._create_production_variable(m, comp, meta, tag='level') - # -> set operational limits - # self._create_capacity(m, comp, level_name, meta) - # (2, 3) separate charge/discharge trackers, so we can implement round-trip efficiency and ramp rates - charge_name = self._create_production_variable(m, comp, meta, tag='charge', add_bounds=False, within=pyo.NonPositiveReals) - discharge_name = self._create_production_variable(m, comp, meta, tag='discharge', add_bounds=False, within=pyo.NonNegativeReals) - # balance level, charge/discharge - level_rule_name = prefix + '_level_constr' - rule = lambda mod, t: prl.level_rule(comp, level_name, charge_name, discharge_name, initial_storage, r, mod, t) - setattr(m, level_rule_name, pyo.Constraint(m.T, rule=rule)) - # periodic boundary condition for storage level - if comp.get_interaction().apply_periodic_level: - periodic_rule_name = prefix + '_level_periodic_constr' - rule = lambda mod, t: prl.periodic_level_rule(comp, level_name, initial_storage, r, mod, t) - setattr(m, periodic_rule_name, pyo.Constraint(m.T, rule=rule)) - - # (4) a binary variable to track whether we're charging or discharging, to prevent BOTH happening - # -> 0 is charging, 1 is discharging - # -> TODO make this a user-based option to disable, if they want to allow dual operation - # -> -> but they should really think about if that's what they want! - # FIXME currently introducing the bigM strategy also makes solves numerically unstable, - # and frequently results in spurious errors. For now, disable it. - allow_both = True # allow simultaneous charging and discharging - if not allow_both: - bin_name = self._create_production_variable(m, comp, meta, tag='dcforcer', add_bounds=False, within=pyo.Binary) - # we need a large epsilon, but not so large that addition stops making sense - # -> we don't know what any values for this component will be! How do we choose? - # -> NOTE that choosing this value has VAST impact on solve stability!! 
- large_eps = 1e8 #0.01 * sys.float_info.max - # charging constraint: don't charge while discharging (note the sign matters) - charge_rule_name = prefix + '_charge_constr' - rule = lambda mod, t: prl.charge_rule(charge_name, bin_name, large_eps, r, mod, t) - setattr(m, charge_rule_name, pyo.Constraint(m.T, rule=rule)) - discharge_rule_name = prefix + '_discharge_constr' - rule = lambda mod, t: prl.discharge_rule(discharge_name, bin_name, large_eps, r, mod, t) - setattr(m, discharge_rule_name, pyo.Constraint(m.T, rule=rule)) - - - def _create_conservation(self, m, resources, initial_storage, meta): - """ - Creates pyomo conservation constraints - @ In, m, pyo.ConcreteModel, associated model - @ In, resources, list, list of resources in problem - @ In, initial_storage, dict, initial storage levels - @ In, meta, dict, dictionary of state variables - @ Out, None - """ - for resource in resources: - rule = lambda mod, t: prl.conservation_rule(resource, mod, t) - constr = pyo.Constraint(m.T, rule=rule) - setattr(m, f'{resource}_conservation', constr) - - - def _create_objective(self, meta, m): - """ - Creates pyomo objective function - @ In, meta, dict, additional variables to pass through - @ In, m, pyo.ConcreteModel, associated model - @ Out, None - """ - # cashflow eval - rule = lambda mod: prl.cashflow_rule(self._compute_cashflows, meta, mod) - m.obj = pyo.Objective(rule=rule, sense=pyo.maximize) + # ### PYOMO Element Constructors + # def _create_production_limit(self, m, validation): + # """ + # Creates pyomo production constraint given validation errors + # @ In, m, pyo.ConcreteModel, associated model + # @ In, validation, dict, information from Validator about limit violation + # @ Out, None + # """ + # # TODO could validator write a symbolic expression on request? That'd be sweet. 
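# --- Illustrative sketch (not part of this patch): the unique-name search in
# the validation-constraint code above, as a reusable helper. The demo model
# and names are hypothetical; assumes pyomo is installed.
import pyomo.environ as pyo

def add_unique_constraint(model, base_name, constr):
  # probe base_name_1, base_name_2, ... until an unused attribute is found
  counter = 1
  while getattr(model, f'{base_name}_{counter}', None) is not None:
    counter += 1
  setattr(model, f'{base_name}_{counter}', constr)

demo = pyo.ConcreteModel()
demo.T = pyo.Set(initialize=range(2))
demo.x = pyo.Var(demo.T)
add_unique_constraint(demo, 'x_vld_limit_constr', pyo.Constraint(demo.T, rule=lambda m, t: m.x[t] <= 1))
add_unique_constraint(demo, 'x_vld_limit_constr', pyo.Constraint(demo.T, rule=lambda m, t: m.x[t] >= 0))
# the model now carries x_vld_limit_constr_1 and x_vld_limit_constr_2
# --- end sketch ---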
+ # comp = validation['component'] + # resource = validation['resource'] + # r = m.resource_index_map[comp][resource] + # t = validation['time_index'] + # limit = validation['limit'] + # limits = np.zeros(len(m.Times)) + # limits[t] = limit + # limit_type = validation['limit_type'] + # prod_name = f'{comp.name}_production' + # rule = lambda mod: prl.prod_limit_rule(prod_name, r, limits, limit_type, t, mod) + # constr = pyo.Constraint(rule=rule) + # counter = 1 + # name_template = f'{comp.name}_{resource}_{t}_vld_limit_constr_{{i}}' + # # make sure we get a unique name for this constraint + # name = name_template.format(i=counter) + # while getattr(m, name, None) is not None: + # counter += 1 + # name = name_template.format(i=counter) + # setattr(m, name, constr) + # print(f'DEBUGG added validation constraint "{name}"') + + + # def _create_production_param(self, m, comp, values, tag=None): + # """ + # Creates production pyomo fixed parameter object for a component + # @ In, m, pyo.ConcreteModel, associated model + # @ In, comp, HERON Component, component to make production variables for + # @ In, values, np.array(float), values to set for param + # @ In, tag, str, optional, if not None then name will be component_[tag] + # @ Out, prod_name, str, name of production variable + # """ + # name = comp.name + # if tag is None: + # tag = 'production' + # # create pyomo indexer for this component's resources + # res_indexer = pyo.Set(initialize=range(len(m.resource_index_map[comp]))) + # setattr(m, f'{name}_res_index_map', res_indexer) + # prod_name = f'{name}_{tag}' + # init = (((0, t), values[t]) for t in m.T) + # prod = pyo.Param(res_indexer, m.T, initialize=dict(init)) + # setattr(m, prod_name, prod) + # return prod_name + + + # def _create_production(self, m, comp, meta): + # """ + # Creates all pyomo variable objects for a non-storage component + # @ In, m, pyo.ConcreteModel, associated model + # @ In, comp, HERON Component, component to make production variables for + # @ In, meta, dict, dictionary of state variables + # @ Out, None + # """ + # prod_name = self._create_production_variable(m, comp, meta) + # ## if you cannot set limits directly in the production variable, set separate contraint: + # ## Method 1: set variable bounds directly --> TODO more work needed, but would be nice + # # lower, upper = self._get_prod_bounds(m, comp) + # # limits should be None unless specified, so use "getters" from dictionaries + # # bounds = lambda m, r, t: (lower.get(r, None), upper.get(r, None)) + # ## Method 2: set variable bounds directly --> TODO more work needed, but would be nice + # # self._create_capacity(m, comp, prod_name, meta) # capacity constraints + # # transfer function governs input -> output relationship + # self._create_transfer(m, comp, prod_name) + # # ramp rates + # if comp.ramp_limit is not None: + # self._create_ramp_limit(m, comp, prod_name, meta) + # return prod_name + + + # def _create_production_variable(self, m, comp, meta, tag=None, add_bounds=True, **kwargs): + # """ + # Creates production pyomo variable object for a component + # @ In, m, pyo.ConcreteModel, associated model + # @ In, comp, HERON Component, component to make production variables for + # @ In, tag, str, optional, if not None then name will be component_[tag]; otherwise "production" + # @ In, add_bounds, bool, optional, if True then determine and set bounds for variable + # @ In, kwargs, dict, optional, passalong kwargs to pyomo variable + # @ Out, prod_name, str, name of production variable + # """ + # if 
tag is None: + # tag = 'production' + # name = comp.name + # cap_res = comp.get_capacity_var() # name of resource that defines capacity + # limit_r = m.resource_index_map[comp][cap_res] # production index of the governing resource + # # create pyomo indexer for this component's resources + # indexer_name = f'{name}_res_index_map' + # indexer = getattr(m, indexer_name, None) + # if indexer is None: + # indexer = pyo.Set(initialize=range(len(m.resource_index_map[comp]))) + # setattr(m, indexer_name, indexer) + # prod_name = f'{name}_{tag}' + # caps, mins = self._find_production_limits(m, comp, meta) + # if min(caps) < 0: + # # quick check that capacities signs are consistent #FIXME: revisit, this is an assumption + # assert max(caps) <= 0, \ + # 'Capacities are inconsistent: mix of positive and negative values not currently supported.' + # # we have a unit that's consuming, so we need to flip the variables to be sensible + # mins, caps = caps, mins + # inits = caps + # else: + # inits = mins + # if add_bounds: + # # create bounds based in min, max operation + # bounds = lambda m, r, t: (mins[t] if r == limit_r else None, caps[t] if r == limit_r else None) + # initial = lambda m, r, t: inits[t] if r == limit_r else 0 + # else: + # bounds = (None, None) + # initial = 0 + # # production variable depends on resources, time + # #FIXME initials! Should be lambda with mins for tracking var! + # prod = pyo.Var(indexer, m.T, initialize=initial, bounds=bounds, **kwargs) + # # TODO it may be that we need to set variable values to avoid problems in some solvers. + # # if comp.is_dispatchable() == 'fixed': + # # for t, _ in enumerate(m.Times): + # # prod[limit_r, t].fix(caps[t]) + # setattr(m, prod_name, prod) + # return prod_name + + + # def _create_ramp_limit(self, m, comp, prod_name, meta): + # """ + # Creates ramping limitations for a producing component + # @ In, m, pyo.ConcreteModel, associated model + # @ In, comp, HERON Component, component to make ramping limits for + # @ In, prod_name, str, name of production variable + # @ In, meta, dict, dictionary of state variables + # @ Out, None + # """ + # # ramping is defined in terms of the capacity variable + # cap_res = comp.get_capacity_var() # name of resource that defines capacity + # cap = comp.get_capacity(meta)[0][cap_res] + # r = m.resource_index_map[comp][cap_res] # production index of the governing resource + # # NOTE: this includes the built capacity * capacity factor, if any, which assumes + # # the ramp rate depends on the available capacity, not the built capacity. + # limit_delta = comp.ramp_limit * cap # NOTE: if cap is negative, then this is negative. 
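# --- Illustrative sketch (not part of this patch): setting aside the
# ramp-frequency binaries, the ramp limits being built here amount to two
# inequalities per step, bounding the change in production between adjacent
# time indices. Numbers are hypothetical; assumes pyomo is installed.
import pyomo.environ as pyo

delta = 2.0  # hypothetical ramp_limit * capacity
demo = pyo.ConcreteModel()
demo.T = pyo.Set(initialize=range(4))
demo.prod = pyo.Var(demo.T, bounds=(0, 10))
demo.ramp_up = pyo.Constraint(
    demo.T,
    rule=lambda m, t: pyo.Constraint.Skip if t == 0 else m.prod[t] - m.prod[t - 1] <= delta)
demo.ramp_down = pyo.Constraint(
    demo.T,
    rule=lambda m, t: pyo.Constraint.Skip if t == 0 else m.prod[t - 1] - m.prod[t] <= delta)
# --- end sketch ---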
+ # if limit_delta < 0: + # neg_cap = True + # else: + # neg_cap = False + # # if we're limiting ramp frequency, make vars and rules for that + # if comp.ramp_freq: + # # create binaries for tracking ramping + # up = pyo.Var(m.T, initialize=0, domain=pyo.Binary) + # down = pyo.Var(m.T, initialize=0, domain=pyo.Binary) + # steady = pyo.Var(m.T, initialize=1, domain=pyo.Binary) + # setattr(m, f'{comp.name}_up_ramp_tracker', up) + # setattr(m, f'{comp.name}_down_ramp_tracker', down) + # setattr(m, f'{comp.name}_steady_ramp_tracker', steady) + # ramp_trackers = (down, up, steady) + # else: + # ramp_trackers = None + # # limit production changes when ramping down + # ramp_rule_down = lambda mod, t: prl.ramp_rule_down(prod_name, r, limit_delta, neg_cap, t, mod, bins=ramp_trackers) + # constr = pyo.Constraint(m.T, rule=ramp_rule_down) + # setattr(m, f'{comp.name}_ramp_down_constr', constr) + # # limit production changes when ramping up + # ramp_rule_up = lambda mod, t: prl.ramp_rule_up(prod_name, r, limit_delta, neg_cap, t, mod, bins=ramp_trackers) + # constr = pyo.Constraint(m.T, rule=ramp_rule_up) + # setattr(m, f'{comp.name}_ramp_up_constr', constr) + # # if ramping frequency limit, impose binary constraints + # if comp.ramp_freq: + # # binaries rule, for exclusive choice up/down/steady + # binaries_rule = lambda mod, t: prl.ramp_freq_bins_rule(down, up, steady, t, mod) + # constr = pyo.Constraint(m.T, rule=binaries_rule) + # setattr(m, f'{comp.name}_ramp_freq_binaries', constr) + # # limit frequency of ramping + # # TODO calculate "tao" window using ramp freq and dt + # # -> for now, just use the integer for number of windows + # freq_rule = lambda mod, t: prl.ramp_freq_rule(down, up, comp.ramp_freq, t, m) + # constr = pyo.Constraint(m.T, rule=freq_rule) + # setattr(m, f'{comp.name}_ramp_freq_constr', constr) + + + # def _create_capacity_constraints(self, m, comp, prod_name, meta): + # """ + # Creates pyomo capacity constraints + # @ In, m, pyo.ConcreteModel, associated model + # @ In, comp, HERON Component, component to make variables for + # @ In, prod_name, str, name of production variable + # @ In, meta, dict, additional state information + # @ Out, None + # """ + # cap_res = comp.get_capacity_var() # name of resource that defines capacity + # r = m.resource_index_map[comp][cap_res] # production index of the governing resource + # caps, mins = self._find_production_limits(m, comp, meta) + # # capacity + # max_rule = lambda mod, t: prl.capacity_rule(prod_name, r, caps, mod, t) + # constr = pyo.Constraint(m.T, rule=max_rule) + # setattr(m, f'{comp.name}_{cap_res}_capacity_constr', constr) + # # minimum + # min_rule = lambda mod, t: prl.min_prod_rule(prod_name, r, caps, mins, mod, t) + # constr = pyo.Constraint(m.T, rule=min_rule) + # # set initial conditions + # for t, time in enumerate(m.Times): + # cap = caps[t] + # if cap == mins[t]: + # # initialize values so there's no boundary errors + # var = getattr(m, prod_name) + # values = var.get_values() + # for k in values: + # values[k] = cap + # var.set_values(values) + # setattr(m, f'{comp.name}_{cap_res}_minprod_constr', constr) + + + # def _find_production_limits(self, m, comp, meta): + # """ + # Determines the capacity limits of a unit's operation, in time. 
+ # @ In, m, pyo.ConcreteModel, associated model + # @ In, comp, HERON Component, component to make variables for + # @ In, meta, dict, additional state information + # @ Out, caps, array, max production values by time + # @ Out, mins, array, min production values by time + # """ + # cap_res = comp.get_capacity_var() # name of resource that defines capacity + # r = m.resource_index_map[comp][cap_res] # production index of the governing resource + # # production is always lower than capacity + # ## NOTE get_capacity returns (data, meta) and data is dict + # ## TODO does this work with, e.g., ARMA-based capacities? + # ### -> "time" is stored on "m" and could be used to correctly evaluate the capacity + # caps = [] + # mins = [] + # for t, time in enumerate(m.Times): + # meta['HERON']['time_index'] = t + m.time_offset + # cap = comp.get_capacity(meta)[0][cap_res] # value of capacity limit (units of governing resource) + # caps.append(cap) + # if (comp.is_dispatchable() == 'fixed'): + # minimum = cap + # else: + # minimum = comp.get_minimum(meta)[0][cap_res] + # mins.append(minimum) + # return caps, mins + + + # def _create_transfer(self, m, comp, prod_name): + # """ + # Creates pyomo transfer function constraints + # @ In, m, pyo.ConcreteModel, associated model + # @ In, comp, HERON Component, component to make variables for + # @ In, prod_name, str, name of production variable + # @ Out, None + # """ + # name = comp.name + # # transfer functions + # # e.g. 2A + 3B -> 1C + 2E + # # get linear coefficients + # # TODO this could also take a transfer function from an external Python function assuming + # # we're careful about how the expression-vs-float gets used + # # and figure out how to handle multiple ins, multiple outs + # ratios = putils.get_transfer_coeffs(m, comp) + # ref_r, ref_name, _ = ratios.pop('__reference', (None, None, None)) + # for resource, ratio in ratios.items(): + # r = m.resource_index_map[comp][resource] + # rule_name = f'{name}_{resource}_{ref_name}_transfer' + # rule = lambda mod, t: prl.transfer_rule(ratio, r, ref_r, prod_name, mod, t) + # constr = pyo.Constraint(m.T, rule=rule) + # setattr(m, rule_name, constr) + + + # def _create_storage(self, m, comp, initial_storage, meta): + # """ + # Creates storage pyomo variable objects for a storage component + # Similar to create_production, but for storages + # @ In, m, pyo.ConcreteModel, associated model + # @ In, comp, HERON Component, component to make production variables for + # @ In, initial_storage, dict, initial storage levels + # @ In, meta, dict, additional state information + # @ Out, level_name, str, name of storage level variable + # """ + # prefix = comp.name + # # what resource index? Isn't it always 0? 
# assumption + # r = 0 # NOTE this is only true if each storage ONLY uses 1 resource + # # storages require a few variables: + # # (1) a level tracker, + # level_name = self._create_production_variable(m, comp, meta, tag='level') + # # -> set operational limits + # # self._create_capacity(m, comp, level_name, meta) + # # (2, 3) separate charge/discharge trackers, so we can implement round-trip efficiency and ramp rates + # charge_name = self._create_production_variable(m, comp, meta, tag='charge', add_bounds=False, within=pyo.NonPositiveReals) + # discharge_name = self._create_production_variable(m, comp, meta, tag='discharge', add_bounds=False, within=pyo.NonNegativeReals) + # # balance level, charge/discharge + # level_rule_name = prefix + '_level_constr' + # rule = lambda mod, t: prl.level_rule(comp, level_name, charge_name, discharge_name, initial_storage, r, mod, t) + # setattr(m, level_rule_name, pyo.Constraint(m.T, rule=rule)) + # # periodic boundary condition for storage level + # if comp.get_interaction().apply_periodic_level: + # periodic_rule_name = prefix + '_level_periodic_constr' + # rule = lambda mod, t: prl.periodic_level_rule(comp, level_name, initial_storage, r, mod, t) + # setattr(m, periodic_rule_name, pyo.Constraint(m.T, rule=rule)) + + # # (4) a binary variable to track whether we're charging or discharging, to prevent BOTH happening + # # -> 0 is charging, 1 is discharging + # # -> TODO make this a user-based option to disable, if they want to allow dual operation + # # -> -> but they should really think about if that's what they want! + # # FIXME currently introducing the bigM strategy also makes solves numerically unstable, + # # and frequently results in spurious errors. For now, disable it. + # allow_both = True # allow simultaneous charging and discharging + # if not allow_both: + # bin_name = self._create_production_variable(m, comp, meta, tag='dcforcer', add_bounds=False, within=pyo.Binary) + # # we need a large epsilon, but not so large that addition stops making sense + # # -> we don't know what any values for this component will be! How do we choose? + # # -> NOTE that choosing this value has VAST impact on solve stability!! 
+ # large_eps = 1e8 #0.01 * sys.float_info.max + # # charging constraint: don't charge while discharging (note the sign matters) + # charge_rule_name = prefix + '_charge_constr' + # rule = lambda mod, t: prl.charge_rule(charge_name, bin_name, large_eps, r, mod, t) + # setattr(m, charge_rule_name, pyo.Constraint(m.T, rule=rule)) + # discharge_rule_name = prefix + '_discharge_constr' + # rule = lambda mod, t: prl.discharge_rule(discharge_name, bin_name, large_eps, r, mod, t) + # setattr(m, discharge_rule_name, pyo.Constraint(m.T, rule=rule)) + + + # def _create_conservation(self, m, resources): + # """ + # Creates pyomo conservation constraints + # @ In, m, pyo.ConcreteModel, associated model + # @ In, resources, list, list of resources in problem + # @ In, initial_storage, dict, initial storage levels + # @ In, meta, dict, dictionary of state variables + # @ Out, None + # """ + # for resource in resources: + # rule = lambda mod, t: prl.conservation_rule(resource, mod, t) + # constr = pyo.Constraint(m.T, rule=rule) + # setattr(m, f'{resource}_conservation', constr) + + + # def _create_objective(self, meta, m): + # """ + # Creates pyomo objective function + # @ In, meta, dict, additional variables to pass through + # @ In, m, pyo.ConcreteModel, associated model + # @ Out, None + # """ + # # cashflow eval + # rule = lambda mod: prl.cashflow_rule(self._compute_cashflows, meta, mod) + # m.obj = pyo.Objective(rule=rule, sense=pyo.maximize) From f4629c027f8a7abe6bdf5869e314c93c557bec90 Mon Sep 17 00:00:00 2001 From: Dylan McDowell Date: Tue, 19 Dec 2023 12:41:37 -0700 Subject: [PATCH 05/24] Fix PyomoModelHandler --- src/dispatch/PyomoModelHandler.py | 112 ++++++++++++++++-------------- 1 file changed, 59 insertions(+), 53 deletions(-) diff --git a/src/dispatch/PyomoModelHandler.py b/src/dispatch/PyomoModelHandler.py index 741ce9d9..2f0ff413 100644 --- a/src/dispatch/PyomoModelHandler.py +++ b/src/dispatch/PyomoModelHandler.py @@ -1,4 +1,8 @@ - +# Copyright 2020, Battelle Energy Alliance, LLC +# ALL RIGHTS RESERVED +""" + This module constructs the dispatch optimization model used by HERON. +""" import numpy as np import pyomo.environ as pyo @@ -17,23 +21,25 @@ def __init__(self, time, time_offset, case, components, resources, initial_stora self.resources = resources self.initial_storage = initial_storage self.meta = meta - self.model = pyo.ConcreteModel() + self.model = self.build_model() def build_model(self): + model = pyo.ConcreteModel() C = np.arange(0, len(self.components), dtype=int) # indexes component R = np.arange(0, len(self.resources), dtype=int) # indexes resources T = np.arange(0, len(self.time), dtype=int) # indexes resources - self.model.C = pyo.Set(initialize=C) - self.model.R = pyo.Set(initialize=R) - self.model.T = pyo.Set(initialize=T) - self.model.Times = self.time - self.model.time_offset = self.time_offset - self.model.resource_index_map = self.meta['HERON']['resource_indexer'] # maps the resource to its index WITHIN APPLICABLE components (sparse matrix) e.g. component: {resource: local index}, ... etc} + model.C = pyo.Set(initialize=C) + model.R = pyo.Set(initialize=R) + model.T = pyo.Set(initialize=T) + model.Times = self.time + model.time_offset = self.time_offset + model.resource_index_map = self.meta['HERON']['resource_indexer'] # maps the resource to its index WITHIN APPLICABLE components (sparse matrix) e.g. component: {resource: local index}, ... 
etc} # properties - self.model.Case = self.case - self.model.Components = self.components - self.model.Activity = PyomoState() - self.model.Activity.initialize(self.model.Components, self.model.resource_index_map, self.model.Times, self.model) + model.Case = self.case + model.Components = self.components + model.Activity = PyomoState() + model.Activity.initialize(model.Components, model.resource_index_map, model.Times, model) + return model def populate_model(self): for comp in self.components: @@ -124,7 +130,7 @@ def _create_production_param(self, m, comp, values, tag=None): return prod_name - def _create_production(self, m, comp, meta): + def _create_production(self, comp): """ Creates all pyomo variable objects for a non-storage component @ In, m, pyo.ConcreteModel, associated model @@ -132,7 +138,7 @@ def _create_production(self, m, comp, meta): @ In, meta, dict, dictionary of state variables @ Out, None """ - prod_name = self._create_production_variable(m, comp, meta) + prod_name = self._create_production_variable(comp) ## if you cannot set limits directly in the production variable, set separate contraint: ## Method 1: set variable bounds directly --> TODO more work needed, but would be nice # lower, upper = self._get_prod_bounds(m, comp) @@ -141,14 +147,14 @@ def _create_production(self, m, comp, meta): ## Method 2: set variable bounds directly --> TODO more work needed, but would be nice # self._create_capacity(m, comp, prod_name, meta) # capacity constraints # transfer function governs input -> output relationship - self._create_transfer(m, comp, prod_name) + self._create_transfer(comp, prod_name) # ramp rates if comp.ramp_limit is not None: - self._create_ramp_limit(m, comp, prod_name, meta) + self._create_ramp_limit(comp, prod_name) return prod_name - def _create_production_variable(self, m, comp, meta, tag=None, add_bounds=True, **kwargs): + def _create_production_variable(self, comp, tag=None, add_bounds=True, **kwargs): """ Creates production pyomo variable object for a component @ In, m, pyo.ConcreteModel, associated model @@ -162,15 +168,15 @@ def _create_production_variable(self, m, comp, meta, tag=None, add_bounds=True, tag = 'production' name = comp.name cap_res = comp.get_capacity_var() # name of resource that defines capacity - limit_r = m.resource_index_map[comp][cap_res] # production index of the governing resource + limit_r = self.model.resource_index_map[comp][cap_res] # production index of the governing resource # create pyomo indexer for this component's resources indexer_name = f'{name}_res_index_map' - indexer = getattr(m, indexer_name, None) + indexer = getattr(self.model, indexer_name, None) if indexer is None: - indexer = pyo.Set(initialize=range(len(m.resource_index_map[comp]))) - setattr(m, indexer_name, indexer) + indexer = pyo.Set(initialize=range(len(self.model.resource_index_map[comp]))) + setattr(self.model, indexer_name, indexer) prod_name = f'{name}_{tag}' - caps, mins = self._find_production_limits(m, comp, meta) + caps, mins = self._find_production_limits(comp) if min(caps) < 0: # quick check that capacities signs are consistent #FIXME: revisit, this is an assumption assert max(caps) <= 0, \ @@ -189,12 +195,12 @@ def _create_production_variable(self, m, comp, meta, tag=None, add_bounds=True, initial = 0 # production variable depends on resources, time #FIXME initials! Should be lambda with mins for tracking var! 
- prod = pyo.Var(indexer, m.T, initialize=initial, bounds=bounds, **kwargs) + prod = pyo.Var(indexer, self.model.T, initialize=initial, bounds=bounds, **kwargs) # TODO it may be that we need to set variable values to avoid problems in some solvers. # if comp.is_dispatchable() == 'fixed': # for t, _ in enumerate(m.Times): # prod[limit_r, t].fix(caps[t]) - setattr(m, prod_name, prod) + setattr(self.model, prod_name, prod) return prod_name @@ -284,7 +290,7 @@ def _create_capacity_constraints(self, m, comp, prod_name, meta): setattr(m, f'{comp.name}_{cap_res}_minprod_constr', constr) - def _find_production_limits(self, m, comp, meta): + def _find_production_limits(self, comp): """ Determines the capacity limits of a unit's operation, in time. @ In, m, pyo.ConcreteModel, associated model @@ -294,26 +300,26 @@ def _find_production_limits(self, m, comp, meta): @ Out, mins, array, min production values by time """ cap_res = comp.get_capacity_var() # name of resource that defines capacity - r = m.resource_index_map[comp][cap_res] # production index of the governing resource + r = self.model.resource_index_map[comp][cap_res] # production index of the governing resource # production is always lower than capacity ## NOTE get_capacity returns (data, meta) and data is dict ## TODO does this work with, e.g., ARMA-based capacities? ### -> "time" is stored on "m" and could be used to correctly evaluate the capacity caps = [] mins = [] - for t, time in enumerate(m.Times): - meta['HERON']['time_index'] = t + m.time_offset - cap = comp.get_capacity(meta)[0][cap_res] # value of capacity limit (units of governing resource) + for t, time in enumerate(self.model.Times): + self.meta['HERON']['time_index'] = t + self.model.time_offset + cap = comp.get_capacity(self.meta)[0][cap_res] # value of capacity limit (units of governing resource) caps.append(cap) if (comp.is_dispatchable() == 'fixed'): minimum = cap else: - minimum = comp.get_minimum(meta)[0][cap_res] + minimum = comp.get_minimum(self.meta)[0][cap_res] mins.append(minimum) return caps, mins - def _create_transfer(self, m, comp, prod_name): + def _create_transfer(self, comp, prod_name): """ Creates pyomo transfer function constraints @ In, m, pyo.ConcreteModel, associated model @@ -328,17 +334,17 @@ def _create_transfer(self, m, comp, prod_name): # TODO this could also take a transfer function from an external Python function assuming # we're careful about how the expression-vs-float gets used # and figure out how to handle multiple ins, multiple outs - ratios = putils.get_transfer_coeffs(m, comp) + ratios = putils.get_transfer_coeffs(self.model, comp) ref_r, ref_name, _ = ratios.pop('__reference', (None, None, None)) for resource, ratio in ratios.items(): - r = m.resource_index_map[comp][resource] + r = self.model.resource_index_map[comp][resource] rule_name = f'{name}_{resource}_{ref_name}_transfer' rule = lambda mod, t: prl.transfer_rule(ratio, r, ref_r, prod_name, mod, t) - constr = pyo.Constraint(m.T, rule=rule) - setattr(m, rule_name, constr) + constr = pyo.Constraint(self.model.T, rule=rule) + setattr(self.model, rule_name, constr) - def _create_storage(self, m, comp, initial_storage, meta): + def _create_storage(self, comp): """ Creates storage pyomo variable objects for a storage component Similar to create_production, but for storages @@ -353,21 +359,21 @@ def _create_storage(self, m, comp, initial_storage, meta): r = 0 # NOTE this is only true if each storage ONLY uses 1 resource # storages require a few variables: # (1) a level tracker, - 
level_name = self._create_production_variable(m, comp, meta, tag='level') + level_name = self._create_production_variable(comp, tag='level') # -> set operational limits # self._create_capacity(m, comp, level_name, meta) # (2, 3) separate charge/discharge trackers, so we can implement round-trip efficiency and ramp rates - charge_name = self._create_production_variable(m, comp, meta, tag='charge', add_bounds=False, within=pyo.NonPositiveReals) - discharge_name = self._create_production_variable(m, comp, meta, tag='discharge', add_bounds=False, within=pyo.NonNegativeReals) + charge_name = self._create_production_variable(comp, tag='charge', add_bounds=False, within=pyo.NonPositiveReals) + discharge_name = self._create_production_variable(comp, tag='discharge', add_bounds=False, within=pyo.NonNegativeReals) # balance level, charge/discharge level_rule_name = prefix + '_level_constr' - rule = lambda mod, t: prl.level_rule(comp, level_name, charge_name, discharge_name, initial_storage, r, mod, t) - setattr(m, level_rule_name, pyo.Constraint(m.T, rule=rule)) + rule = lambda mod, t: prl.level_rule(comp, level_name, charge_name, discharge_name, self.initial_storage, r, mod, t) + setattr(self.model, level_rule_name, pyo.Constraint(self.model.T, rule=rule)) # periodic boundary condition for storage level if comp.get_interaction().apply_periodic_level: periodic_rule_name = prefix + '_level_periodic_constr' - rule = lambda mod, t: prl.periodic_level_rule(comp, level_name, initial_storage, r, mod, t) - setattr(m, periodic_rule_name, pyo.Constraint(m.T, rule=rule)) + rule = lambda mod, t: prl.periodic_level_rule(comp, level_name, self.initial_storage, r, mod, t) + setattr(self.model, periodic_rule_name, pyo.Constraint(self.model.T, rule=rule)) # (4) a binary variable to track whether we're charging or discharging, to prevent BOTH happening # -> 0 is charging, 1 is discharging @@ -377,7 +383,7 @@ def _create_storage(self, m, comp, initial_storage, meta): # and frequently results in spurious errors. For now, disable it. allow_both = True # allow simultaneous charging and discharging if not allow_both: - bin_name = self._create_production_variable(m, comp, meta, tag='dcforcer', add_bounds=False, within=pyo.Binary) + bin_name = self._create_production_variable(comp, tag='dcforcer', add_bounds=False, within=pyo.Binary) # we need a large epsilon, but not so large that addition stops making sense # -> we don't know what any values for this component will be! How do we choose? # -> NOTE that choosing this value has VAST impact on solve stability!! 
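# --- Illustrative sketch (not part of this patch): the charge/discharge
# exclusion described above is the standard big-M pattern. With mode = 0 while
# charging and 1 while discharging, each activity is forced to zero unless its
# mode is selected. Bounds and M are hypothetical; assumes pyomo is installed.
import pyomo.environ as pyo

M = 1e8  # "large epsilon": must exceed any feasible activity, but huge values hurt numerics
demo = pyo.ConcreteModel()
demo.T = pyo.Set(initialize=range(3))
demo.charge = pyo.Var(demo.T, within=pyo.NonPositiveReals)     # charging is negative activity
demo.discharge = pyo.Var(demo.T, within=pyo.NonNegativeReals)  # discharging is positive activity
demo.mode = pyo.Var(demo.T, within=pyo.Binary)                 # 0 = charging, 1 = discharging
demo.no_charge = pyo.Constraint(demo.T, rule=lambda m, t: -m.charge[t] <= (1 - m.mode[t]) * M)
demo.no_discharge = pyo.Constraint(demo.T, rule=lambda m, t: m.discharge[t] <= m.mode[t] * M)
# --- end sketch ---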
@@ -385,13 +391,13 @@ def _create_storage(self, m, comp, initial_storage, meta): # charging constraint: don't charge while discharging (note the sign matters) charge_rule_name = prefix + '_charge_constr' rule = lambda mod, t: prl.charge_rule(charge_name, bin_name, large_eps, r, mod, t) - setattr(m, charge_rule_name, pyo.Constraint(m.T, rule=rule)) + setattr(self.model, charge_rule_name, pyo.Constraint(self.model.T, rule=rule)) discharge_rule_name = prefix + '_discharge_constr' rule = lambda mod, t: prl.discharge_rule(discharge_name, bin_name, large_eps, r, mod, t) - setattr(m, discharge_rule_name, pyo.Constraint(m.T, rule=rule)) + setattr(self.model, discharge_rule_name, pyo.Constraint(self.model.T, rule=rule)) - def _create_conservation(self, m, resources): + def _create_conservation(self): """ Creates pyomo conservation constraints @ In, m, pyo.ConcreteModel, associated model @@ -400,13 +406,13 @@ def _create_conservation(self, m, resources): @ In, meta, dict, dictionary of state variables @ Out, None """ - for resource in resources: + for resource in self.resources: rule = lambda mod, t: prl.conservation_rule(resource, mod, t) - constr = pyo.Constraint(m.T, rule=rule) - setattr(m, f'{resource}_conservation', constr) + constr = pyo.Constraint(self.model.T, rule=rule) + setattr(self.model, f'{resource}_conservation', constr) - def _create_objective(self, meta, m): + def _create_objective(self): """ Creates pyomo objective function @ In, meta, dict, additional variables to pass through @@ -414,8 +420,8 @@ def _create_objective(self, meta, m): @ Out, None """ # cashflow eval - rule = lambda mod: prl.cashflow_rule(self._compute_cashflows, meta, mod) - m.obj = pyo.Objective(rule=rule, sense=pyo.maximize) + rule = lambda mod: prl.cashflow_rule(self._compute_cashflows, self.meta, mod) + self.model.obj = pyo.Objective(rule=rule, sense=pyo.maximize) From 47e7792b54812e641abc5862964e05e409cca1b3 Mon Sep 17 00:00:00 2001 From: Dylan McDowell Date: Tue, 19 Dec 2023 13:43:16 -0700 Subject: [PATCH 06/24] Working PMH for now... --- src/dispatch/PyomoModelHandler.py | 107 ++++++++++++++++++++++++++++++ 1 file changed, 107 insertions(+) diff --git a/src/dispatch/PyomoModelHandler.py b/src/dispatch/PyomoModelHandler.py index 2f0ff413..a00b5595 100644 --- a/src/dispatch/PyomoModelHandler.py +++ b/src/dispatch/PyomoModelHandler.py @@ -12,6 +12,8 @@ class PyomoModelHandler: + _eps = 1e-9 + def __init__(self, time, time_offset, case, components, resources, initial_storage, meta) -> None: self.time = time self.time_offset = time_offset @@ -423,6 +425,111 @@ def _create_objective(self): rule = lambda mod: prl.cashflow_rule(self._compute_cashflows, self.meta, mod) self.model.obj = pyo.Objective(rule=rule, sense=pyo.maximize) + def _compute_cashflows(self, components, activity, times, meta, state_args=None, time_offset=0): + """ + Method to compute CashFlow evaluations given components and their activity. 
+ @ In, components, list, HERON components whose cashflows should be evaluated + @ In, activity, DispatchState instance, activity by component/resources/time + @ In, times, np.array(float), time values to evaluate; may be length 1 or longer + @ In, meta, dict, additional info to be passed through to functional evaluations + @ In, state_args, dict, optional, additional arguments to pass while getting activity state + @ In, time_offset, int, optional, increase time index tracker by this value if provided + @ Out, total, float, total cashflows for given components + """ + if state_args is None: + state_args = {} + + if meta['HERON']['Case'].use_levelized_inner: + total = self._compute_levelized_cashflows(components, activity, times, meta, state_args, time_offset) + return total + + total = 0 + specific_meta = dict(meta) # TODO what level of copying do we need here? + resource_indexer = meta['HERON']['resource_indexer'] + + #print('DEBUGG computing cashflows!') + for comp in components: + #print(f'DEBUGG ... comp {comp.name}') + specific_meta['HERON']['component'] = comp + comp_subtotal = 0 + for t, time in enumerate(times): + #print(f'DEBUGG ... ... time {t}') + # NOTE care here to assure that pyomo-indexed variables work here too + specific_activity = {} + for tracker in comp.get_tracking_vars(): + specific_activity[tracker] = {} + for resource in resource_indexer[comp]: + specific_activity[tracker][resource] = activity.get_activity(comp, tracker, resource, time, **state_args) + specific_meta['HERON']['time_index'] = t + time_offset + specific_meta['HERON']['time_value'] = time + cfs = comp.get_state_cost(specific_activity, specific_meta, marginal=True) + time_subtotal = sum(cfs.values()) + comp_subtotal += time_subtotal + total += comp_subtotal + return total + + def _compute_levelized_cashflows(self, components, activity, times, meta, state_args=None, time_offset=0): + """ + Method to compute CashFlow evaluations given components and their activity. + @ In, components, list, HERON components whose cashflows should be evaluated + @ In, activity, DispatchState instance, activity by component/resources/time + @ In, times, np.array(float), time values to evaluate; may be length 1 or longer + @ In, meta, dict, additional info to be passed through to functional evaluations + @ In, state_args, dict, optional, additional arguments to pass while getting activity state + @ In, time_offset, int, optional, increase time index tracker by this value if provided + @ Out, total, float, total cashflows for given components + """ + total = 0 + specific_meta = dict(meta) # TODO what level of copying do we need here? + resource_indexer = meta['HERON']['resource_indexer'] + + # How does this work? + # The general equation looks like: + # + # SUM(Non-Multiplied Terms) + x * SUM(Multiplied Terms) = Target + # + # and we are solving for `x`. Target is 0 by default. Terms here are marginal cashflows. + # Summations here occur over: components, time steps, tracking variables, and resources. + # Typically, there is only 1 multiplied term/cash flow. 
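    # For example (hypothetical numbers): with Target = 0,
    # SUM(Non-Multiplied Terms) = -1000, and SUM(Multiplied Terms) = 250, then
    #   -1000 + x * 250 = 0  =>  x = (0 - (-1000)) / 250 = 4.0,
    # i.e. x = (target - non_multiplied) / multiplied, which is the form
    # computed below (and then negated before being returned).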
+ + multiplied = 0 + non_multiplied = 0 + + for comp in components: + specific_meta['HERON']['component'] = comp + multiplied_comp = 0 + non_multiplied_comp = 0 + for t, time in enumerate(times): + # NOTE care here to assure that pyomo-indexed variables work here too + specific_activity = {} + for tracker in comp.get_tracking_vars(): + specific_activity[tracker] = {} + for resource in resource_indexer[comp]: + specific_activity[tracker][resource] = activity.get_activity(comp, tracker, resource, time, **state_args) + specific_meta['HERON']['time_index'] = t + time_offset + specific_meta['HERON']['time_value'] = time + cfs = comp.get_state_cost(specific_activity, specific_meta, marginal=True) + + # there is an assumption here that if a component has a levelized cost, marginal cashflow + # then it is the only marginal cashflow + if comp.levelized_meta: + for cf in comp.levelized_meta.keys(): + lcf = cfs.pop(cf) # this should be ok as long as HERON init checks are successful + multiplied_comp += lcf + else: + time_subtotal = sum(cfs.values()) + non_multiplied_comp += time_subtotal + + multiplied += multiplied_comp + non_multiplied += non_multiplied_comp + + # at this point, there should be a not None NPV Target + multiplied += self._eps + total = (meta['HERON']['Case'].npv_target - non_multiplied) / multiplied + total *= -1 + return total + + From d3fb4358696916da5def956c299a93230aec644f Mon Sep 17 00:00:00 2001 From: talbpw Date: Tue, 28 Nov 2023 13:06:46 -0700 Subject: [PATCH 07/24] moving to branch off of DOVE refactor --- src/ValuedParams/Linear.py | 27 ++++------ src/ValuedParams/Polynomial.py | 95 ++++++++++++++++++++++++++++++++++ 2 files changed, 106 insertions(+), 16 deletions(-) create mode 100644 src/ValuedParams/Polynomial.py diff --git a/src/ValuedParams/Linear.py b/src/ValuedParams/Linear.py index 1ec9a0bb..932be58a 100644 --- a/src/ValuedParams/Linear.py +++ b/src/ValuedParams/Linear.py @@ -6,9 +6,9 @@ Primarily intended for transfer functions. """ from .ValuedParam import ValuedParam, InputData, InputTypes +from Polynomial import Polynomial -# class for custom dynamically-evaluated quantities -class Linear(ValuedParam): +class Linear(Polynomial): """ Represents a ValuedParam that is a linearized transfer function. """ @@ -16,10 +16,13 @@ class Linear(ValuedParam): @classmethod def get_input_specs(cls): """ - Template for parameters that can take a scalar, an ARMA history, or a function + Define parameters for a linear transfer function. @ In, None @ Out, spec, InputData, value-based spec """ + # TODO should this be reconciled with Polynomial? 
+    # Pro: maintainability
+    # Con: degraded user experience
     spec = InputData.parameterInputFactory('linear', contentType=InputTypes.StringType,
         descr=r"""indicates this value should be interpreted as a ratio based on an input value.""")
     rate = InputData.parameterInputFactory('rate', contentType=InputTypes.FloatType,
@@ -36,7 +39,6 @@ def __init__(self):
     @ Out, None
     """
     super().__init__()
-    self._coefficients = None # ratios, stored in a dict as key: value

   def read(self, comp_name, spec, mode, alias_dict=None):
     """
@@ -48,20 +50,12 @@ def read(self, comp_name, spec, mode, alias_dict=None):
     @ Out, needs, list, signals needed to evaluate this ValuedParam at runtime
     """
     super().read(comp_name, spec, mode, alias_dict=None)
-    self._coefficients = {}
     for rate_node in spec.findAll('rate'):
       resource = rate_node.parameterValues['resource']
-      self._coefficients[resource] = rate_node.value
+      # linear coefficients are all 1
+      self._coefficients[(resource,)][(1,)] = rate_node.value
     return []

-  def get_coefficients(self):
-    """
-      Returns linear coefficients.
-      @ In, None
-      @ Out, coeffs, dict, coefficient mapping
-    """
-    return self._coefficients
-
   def evaluate(self, inputs, target_var=None, aliases=None):
     """
       Evaluate this ValuedParam, wherever it gets its data from
@@ -71,11 +65,12 @@ def evaluate(self, inputs, target_var=None, aliases=None):
     @ Out, balance, dict, dictionary of resulting evaluation as {vars: vals}
     @ Out, inputs, dict, dictionary of meta (possibly changed during evaluation)
     """
-    if target_var not in self._coefficients:
+    # TODO is this an unnecessarily slow check?
+    if target_var not in [x[0] for x in self._coefficients]:
       self.raiseAnError(RuntimeError, f'"rate" for target variable "{target_var}" not found for ' +
                         f'ValuedParam {self.name}!')
     req_res, req_amt = next(iter(inputs['request'].items()))
-    req_rate = self._coefficients[req_res]
+    req_rate = self._coefficients[(req_res,)][(1,)]
     balance = {req_res: req_amt}
-    for res, rate in self._coefficients.items():
+    for (res,), rate in self._coefficients.items():
       if res == req_res:
         continue
-      balance[res] = rate / req_rate * req_amt
+      balance[res] = rate[(1,)] / req_rate * req_amt
     return balance, inputs
diff --git a/src/ValuedParams/Polynomial.py b/src/ValuedParams/Polynomial.py
new file mode 100644
index 00000000..41de889b
--- /dev/null
+++ b/src/ValuedParams/Polynomial.py
@@ -0,0 +1,95 @@
+
+# Copyright 2020, Battelle Energy Alliance, LLC
+# ALL RIGHTS RESERVED
+"""
+  Values that are expressed as a polynomial relationship.
+  For example, ax^2 + bxy + cy^2 + dx + ey = fm^2 + gmn + hn^2 + im + jn + k
+  Primarily intended for transfer functions.
+"""
+from collections import defaultdict
+
+from .ValuedParam import ValuedParam, InputData, InputTypes
+
+class Polynomial(ValuedParam):
+  """
+    Represents a ValuedParam that is a polynomial relationship.
+  """
+
+  @classmethod
+  def get_input_specs(cls):
+    """
+      Define parameters for a polynomial transfer function.
+      @ In, None
+      @ Out, spec, InputData, value-based spec
+    """
+    spec = InputData.parameterInputFactory('poly', contentType=InputTypes.StringType,
+        descr=r"""indicates this transfer function is expressed by a polynomial relationship of arbitrary order.
+              Note the polynomial must be specified in weak form, with all terms on one side of the equation
+              set equal to zero. For instance, the equation $ax^2 + bx + c = dy^2 + fy + g$ should be reformulated
+              as $ax^2 + bx + (c-g) - dy^2 - fy = 0$.""")
+    coeff = InputData.parameterInputFactory('coeff', contentType=InputTypes.FloatType,
+        descr=r"""one coefficient for one polynomial term of the specified \xmlAttr{resources}.""")
+    coeff.addParam('resource', param_type=InputTypes.StringListType,
+        descr=r"""indicates the resource(s) for which the polynomial coefficient is being provided in this node.
+              Note that the order of the resources matters for specifying the polynomial \xmlAttr{order}.""")
+    coeff.addParam('order', param_type=InputTypes.FloatListType,
+        descr=r"""indicates the orders of the polynomial for each resource specified, in order.
+              For example, if \xmlAttr{resources} is ``x, y'', then order ``2,3'' would mean
+              the specified coefficient is for $x^{2}y^{3}$.""")
+    spec.addSub(coeff)
+    return spec
+
+  def __init__(self):
+    """
+      Constructor.
+      @ In, None
+      @ Out, None
+    """
+    super().__init__()
+    self._coefficients = defaultdict(dict) # coeffs, stored as {(resources): {(order): coeff}}
+
+  def read(self, comp_name, spec, mode, alias_dict=None):
+    """
+      Used to read valued param from XML input
+      @ In, comp_name, str, name of component that this valued param will be attached to; only used for print messages
+      @ In, spec, InputData params, input specifications
+      @ In, mode, type of simulation calculation
+      @ In, alias_dict, dict, optional, aliases to use for variable naming
+      @ Out, needs, list, signals needed to evaluate this ValuedParam at runtime
+    """
+    super().read(comp_name, spec, mode, alias_dict=None)
+    for coeff_node in spec.findAll('coeff'):
+      resource = coeff_node.parameterValues['resource']
+      order = coeff_node.parameterValues['order']
+      # CHECKME does this preserve order correctly?
+      self._coefficients[tuple(resource)][tuple(order)] = coeff_node.value
+    return []
+
+  def get_coefficients(self):
+    """
+      Returns polynomial coefficients.
+ @ In, None + @ Out, coeffs, dict, coefficient mapping + """ + return self._coefficients + + def evaluate(self, inputs, target_var=None, aliases=None): + """ + Evaluate this ValuedParam, wherever it gets its data from + @ In, inputs, dict, stuff from RAVEN, particularly including the keys 'meta' and 'raven_vars' + @ In, target_var, str, optional, requested outgoing variable name if not None + @ In, aliases, dict, optional, alternate variable names for searching in variables + @ Out, balance, dict, dictionary of resulting evaluation as {vars: vals} + @ Out, inputs, dict, dictionary of meta (possibly changed during evaluation) + """ + if target_var not in self._coefficients: + self.raiseAnError(RuntimeError, f'"rate" for target variable "{target_var}" not found for ' + + f'ValuedParam {self.name}!') + req_res, req_amt = next(iter(inputs['request'].items())) + req_rate = self._coefficients[req_res] + balance = {req_res: req_amt} + for res, rate in self._coefficients.items(): + if res == req_res: + continue + balance[res] = rate / req_rate * req_amt + return balance, inputs From 19bf51fb2b99fdf9655aeb8b3e86482f4fa5c52e Mon Sep 17 00:00:00 2001 From: talbpw Date: Tue, 19 Dec 2023 12:48:10 -0700 Subject: [PATCH 08/24] rebase fixes, needs more fixing to run --- dependencies.xml | 1 + src/ValuedParams/Factory.py | 2 + src/ValuedParams/Linear.py | 4 +- src/dispatch/PyomoModelHandler.py | 44 +++++--- src/dispatch/PyomoRuleLibrary.py | 21 ++-- src/dispatch/putils.py | 9 +- .../mechanics/transfer_funcs/heron_input.xml | 102 ++++++++++++++++++ .../mechanics/transfer_funcs/tests | 27 +++++ tests/unit_tests/test_PyomoModelHandler.py | 71 ++++++++++++ 9 files changed, 256 insertions(+), 25 deletions(-) create mode 100644 tests/integration_tests/mechanics/transfer_funcs/heron_input.xml create mode 100644 tests/integration_tests/mechanics/transfer_funcs/tests create mode 100644 tests/unit_tests/test_PyomoModelHandler.py diff --git a/dependencies.xml b/dependencies.xml index 71669901..bf35e6e2 100644 --- a/dependencies.xml +++ b/dependencies.xml @@ -1,6 +1,7 @@
+  0.3.5
   1.1.0
   6.4
diff --git a/src/ValuedParams/Factory.py b/src/ValuedParams/Factory.py
index cbcabb44..57f6b1f6 100644
--- a/src/ValuedParams/Factory.py
+++ b/src/ValuedParams/Factory.py
@@ -11,6 +11,7 @@
 from .Function import Function
 from .Parametric import Parametric, FixedValue, OptBounds, SweepValues
 from .Linear import Linear
+from .Polynomial import Polynomial
 from .Variable import Variable
 from .Activity import Activity
@@ -76,6 +77,7 @@ def make_input_specs(self, name, descr=None, allowed=None, kind='singular'):
 factory.registerType('activity', Activity)
 # ratios, transfers
 factory.registerType('linear', Linear)
+factory.registerType('poly', Polynomial)
 # TODO add: ROM
 # TODO are transfer functions and valued evaluations really the same creature?
diff --git a/src/ValuedParams/Linear.py b/src/ValuedParams/Linear.py
index 932be58a..0daff4be 100644
--- a/src/ValuedParams/Linear.py
+++ b/src/ValuedParams/Linear.py
@@ -6,7 +6,7 @@
   Primarily intended for transfer functions.
 """
 from .ValuedParam import ValuedParam, InputData, InputTypes
-from Polynomial import Polynomial
+from .Polynomial import Polynomial

 class Linear(Polynomial):
   """
@@ -52,7 +52,7 @@ def read(self, comp_name, spec, mode, alias_dict=None):
     super().read(comp_name, spec, mode, alias_dict=None)
     for rate_node in spec.findAll('rate'):
       resource = rate_node.parameterValues['resource']
-      # linear coefficients are all 1
+      # linear coefficients are all order 1, no cross-variable multiplying
       self._coefficients[(resource,)][(1,)] = rate_node.value
     return []
diff --git a/src/dispatch/PyomoModelHandler.py b/src/dispatch/PyomoModelHandler.py
index a00b5595..e64219e7 100644
--- a/src/dispatch/PyomoModelHandler.py
+++ b/src/dispatch/PyomoModelHandler.py
@@ -23,7 +23,7 @@ def __init__(self, time, time_offset, case, components, resources, initial_stora
     self.resources = resources
     self.initial_storage = initial_storage
     self.meta = meta
-    self.model = self.build_model()
+    self.model = self.build_model()

   def build_model(self):
     model = pyo.ConcreteModel()
@@ -326,25 +326,42 @@ def _create_transfer(self, comp, prod_name):
     Creates pyomo transfer function constraints
     @ In, m, pyo.ConcreteModel, associated model
     @ In, comp, HERON Component, component to make variables for
-    @ In, prod_name, str, name of production variable
     @ Out, None
     """
     name = comp.name
     # transfer functions
2A + 3B -> 1C + 2E + + #### OLD #### # get linear coefficients # TODO this could also take a transfer function from an external Python function assuming # we're careful about how the expression-vs-float gets used # and figure out how to handle multiple ins, multiple outs - ratios = putils.get_transfer_coeffs(self.model, comp) - ref_r, ref_name, _ = ratios.pop('__reference', (None, None, None)) - for resource, ratio in ratios.items(): - r = self.model.resource_index_map[comp][resource] - rule_name = f'{name}_{resource}_{ref_name}_transfer' - rule = lambda mod, t: prl.transfer_rule(ratio, r, ref_r, prod_name, mod, t) - constr = pyo.Constraint(self.model.T, rule=rule) - setattr(self.model, rule_name, constr) + # ratios = putils.get_transfer_coeffs(self.model, comp) + # ref_r, ref_name, _ = ratios.pop('__reference', (None, None, None)) + # for resource, ratio in ratios.items(): + # r = self.model.resource_index_map[comp][resource] + # rule_name = f'{name}_{resource}_{ref_name}_transfer' + # rule = lambda mod, t: prl.transfer_rule(ratio, r, ref_r, prod_name, mod, t) + # constr = pyo.Constraint(self.model.T, rule=rule) + # setattr(self.model, rule_name, constr) + # XXX working here ... instead of making lots of little constraints, make one transfer constraint + # using the coefficients and polynomial orders + transfer = comp.get_interaction().get_transfer() + if transfer is None: + return {} + + coeffs = transfer.get_coefficients() + # dict of form {(r1, r2): {(o1, o2): n}} + # where: + # r1, r2 are resource names + # o1, o2 are polynomial orders (may not be integers?) + # n is the float polynomial coefficient for the term + rule_name = f'{name}_transfer_func' + rule = lambda mod, t: prl.transfer_rules(coeffs, m.resource_index_map[comp], prod_name, mod, t) + constr = pyo.Constraint(m.T, rule=rule) + setattr(m, rule_name, constr) def _create_storage(self, comp): """ @@ -530,7 +547,8 @@ def _compute_levelized_cashflows(self, components, activity, times, meta, state_ return total - - - \ No newline at end of file + + + + diff --git a/src/dispatch/PyomoRuleLibrary.py b/src/dispatch/PyomoRuleLibrary.py index 125c7860..93a4a203 100644 --- a/src/dispatch/PyomoRuleLibrary.py +++ b/src/dispatch/PyomoRuleLibrary.py @@ -281,16 +281,25 @@ def min_prod_rule(prod_name, r, caps, minimums, m, t) -> bool: else: return prod[r, t] <= minimums[t] -def transfer_rule(ratio, r, ref_r, prod_name, m, t) -> bool: +def transfer_rule(coeffs, r_map, prod_name, m, t) -> bool: """ Constructs transfer function constraints - @ In, ratio, float, ratio for resource to nominal first resource - @ In, r, int, index of transfer resource - @ In, ref_r, int, index of reference resource + @ In, coeffs, dict, nested mapping of resources and polynomial orders to coefficients + as {(r1, r2): {(o1, o2): n}} + @ In, r_map, dict, mapping of resources to activity indices for this component @ In, prod_name, str, name of production variable @ In, m, pyo.ConcreteModel, associated model @ In, t, int, index of time variable @ Out, transfer, bool, transfer ratio check """ - prod = getattr(m, prod_name) - return prod[r, t] == prod[ref_r, t] * ratio # TODO tolerance?? 
+ activity = getattr(m, prod_name) + eqn = 0 + for resources, ord_dict in coeffs.items(): + for orders, coeff in ord_dict.items(): + term = coeff + for i, res in enumerate(resources): + r = r_map[res] + prod = activity[r, t] + term *= prod ** orders[i] + eqn += term + return eqn == 0 diff --git a/src/dispatch/putils.py b/src/dispatch/putils.py index ed31ea43..b07aec68 100644 --- a/src/dispatch/putils.py +++ b/src/dispatch/putils.py @@ -54,6 +54,7 @@ def get_prod_bounds(m, comp, meta): # return lower, upper def get_transfer_coeffs(m, comp) -> dict: + # FIXME DEPRECATE """ Obtains transfer function ratios (assuming Linear ValuedParams) Form: 1A + 3B -> 2C + 4D @@ -66,7 +67,7 @@ def get_transfer_coeffs(m, comp) -> dict: transfer = comp.get_interaction().get_transfer() if transfer is None: return {} - + # linear transfer coefficients, dict as {resource: coeff}, SIGNS MATTER # it's all about ratios -> store as ratio of resource / first resource (arbitrary) coeffs = transfer.get_coefficients() @@ -77,7 +78,7 @@ def get_transfer_coeffs(m, comp) -> dict: for resource, coef in coeffs_iter: ratios[resource] = coef / first_coef - + return ratios def retrieve_solution(m) -> dict: @@ -153,6 +154,6 @@ def debug_print_soln(m) -> None: elif kind == 'Param': value = prod[r, t] output.append(f' time: {t + m.time_offset} {time} {value}') - + output.append('*' * 80) - print('\n'.join(output)) \ No newline at end of file + print('\n'.join(output)) diff --git a/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml b/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml new file mode 100644 index 00000000..64ad1fc0 --- /dev/null +++ b/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml @@ -0,0 +1,102 @@ + + + TransferFuncs + talbpaul + 2023-11-28 + + Tests various transfer function forms. Make the Source large enough that all transformers play. 
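+
+      For illustration only (hypothetical numbers, not part of this test setup):
+      the polynomial transfer constraint built by PyomoRuleLibrary.transfer_rule
+      sums coeff * activity**order over every term and pins the total to zero.
+      A linear 3-steam-to-1-electricity transfer stored as
+      {('steam',): {(1,): 1/3}, ('electricity',): {(1,): 1.0}}
+      then satisfies (1/3)(-3) + (1.0)(+1) = 0 at steam = -3, electricity = +1,
+      using the convention that consumption is negative.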
+ + HERON + + + + sweep + + 1 + 1 + + 1 + + Time + 2 + 21 + + + 3 + 0.08 + 0.3 + 0.02 + 50 + + + + + + + + + + + 1, 1000 + + + + 2 + + + + + + funding + + -100 + + + + -1 + 0.25 + + + + + 2 + + + + + + + -1e3 + + + + 3 + + + work + + + -1 + + + + + + + + + -1e4 + + + + 3 + + + + + + + %BASE_WORKING_DIR%/../../../TSA/Sine/arma.pk + + + diff --git a/tests/integration_tests/mechanics/transfer_funcs/tests b/tests/integration_tests/mechanics/transfer_funcs/tests new file mode 100644 index 00000000..21236e68 --- /dev/null +++ b/tests/integration_tests/mechanics/transfer_funcs/tests @@ -0,0 +1,27 @@ +[Tests] + [./DebugMode] + type = 'HeronIntegration' + input = heron_input.xml + [./dispatch_db] + type = NetCDF + output = 'Debug_Run_o/dispatch.nc' + gold_files = 'dispatch.nc' + [../] + [./dispatch_csv] + type = UnorderedCSV + output = 'Debug_Run_o/dispatch_print.csv' + gold_files = 'dispatch_print.csv' + rel_err = 1e-8 + [../] + [./debug_plot] + type = Exists + output = 'Debug_Run_o/dispatch_id0_y10_c0.png Debug_Run_o/dispatch_id0_y11_c0.png Debug_Run_o/dispatch_id1_y10_c0.png Debug_Run_o/dispatch_id1_y11_c0.png' + [../] + [./debug_plot] + type = Exists + output = 'network.png' + [../] + [../] +[] + + diff --git a/tests/unit_tests/test_PyomoModelHandler.py b/tests/unit_tests/test_PyomoModelHandler.py new file mode 100644 index 00000000..e464db8e --- /dev/null +++ b/tests/unit_tests/test_PyomoModelHandler.py @@ -0,0 +1,71 @@ +''' + Test specific aspects of HERON Pyomo dispatcher's PyomoModeHandler +''' + +import os +import sys +import pytest +import xml.etree.ElementTree as ET + +# Load HERON tools +HERON_LOC = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir)) +sys.path.append(HERON_LOC) +from HERON.src import Components +from HERON.src import _utils as hutils +from HERON.src.ValuedParamHandler import ValuedParamHandler as VPH +from HERON.src.ValuedParams import factory as VPfactory +from HERON.src.dispatch.PyomoModelHandler import PyomoModelHandler as PMH +pmh = PMH(0, 0, None, None, None, None, None) +sys.path.pop() + +import pyomo.environ as pyo +try: + import ravenframework +except ModuleNotFoundError: + # Load RAVEN tools + sys.path.append(hutils.get_raven_loc()) +#from ravenframework.utils import InputData, xmlUtils,InputTypes +import ravenframework.MessageHandler as MessageHandler + + +@pytest.fixture +def component(): + transfer = VPfactory.returnInstance('poly') + # encode af^2 + bf + cf^0.2 + k_1 = dw + k_2 + # becomes af^2 + bf + cf^0.2 + k - dw = 0 + # divide out d for new a, b, c, k + # af^2 + bf + cf^0.2 + k - w = 0 + # where: + # f is "funding" in [0,400], + # w is "work" in ~ 37 when x~14.5 + # a, b, c, d, are all scalar coefficients: + # a = 1e-3 + # b = -0.5 + # c = 20 + # k = 10 + coeffs = { + ('funding'): {(2): 1e-3, (1): -0.5, (0.2): 20, (0): 10} + } + transfer._coefficients = coeffs + comp = Components.Producer() + comp.name = 'postdoc' + comp.messageHandler = MessageHandler.MessageHandler() + comp.messageHandler.verbosity = 'debug' + comp._capacity_var = 'funding' + comp._capacity = VPH('researcher_capacity') + comp._capacity.set_const_VP(0) + comp.set_capacity(400) + comp._transfer = transfer + yield comp + +class TestHeronPMH: + + def test_prod_tracker(self, component): + assert component.get_tracking_vars() == ['production'] + + def test_transfer(self, component): + m = pyo.ConcreteModel() + pmh._create_transfer(m, component) + print(m) + assert False + From f28b5552f446684aea2ff0710f21523a11223e98 Mon Sep 17 00:00:00 2001 From: 
talbpw
Date: Tue, 2 Jan 2024 12:37:42 -0700
Subject: [PATCH 09/24] stash

---
 src/ValuedParams/Factory.py                   |  2 +-
 src/ValuedParams/Linear.py                    |  4 +-
 src/ValuedParams/Polynomial.py                |  6 ++-
 src/dispatch/PyomoModelHandler.py             | 37 ++++++-------------
 src/dispatch/PyomoRuleLibrary.py              |  1 +
 .../mechanics/transfer_funcs/heron_input.xml  | 35 +++++++++++++++++-
 6 files changed, 55 insertions(+), 30 deletions(-)

diff --git a/src/ValuedParams/Factory.py b/src/ValuedParams/Factory.py
index 57f6b1f6..2145d96c 100644
--- a/src/ValuedParams/Factory.py
+++ b/src/ValuedParams/Factory.py
@@ -85,7 +85,7 @@ def make_input_specs(self, name, descr=None, allowed=None, kind='singular'):
 # map of "kinds" of ValuedParams to the default acceptable ValuedParam types
 allowable = {}
 # transfer functions, such as producing components' transfer functions
-allowable['transfer'] = ['linear', 'Function']
+allowable['transfer'] = ['linear', 'poly', 'Function']
diff --git a/src/ValuedParams/Linear.py b/src/ValuedParams/Linear.py
index 0daff4be..61cb58ea 100644
--- a/src/ValuedParams/Linear.py
+++ b/src/ValuedParams/Linear.py
@@ -8,6 +8,8 @@
 from .ValuedParam import ValuedParam, InputData, InputTypes
 from .Polynomial import Polynomial

+ONE_TUP = tuple([1])
+
 class Linear(Polynomial):
   """
     Represents a ValuedParam that is a linearized transfer function.
@@ -52,7 +55,7 @@ def read(self, comp_name, spec, mode, alias_dict=None):
     for rate_node in spec.findAll('rate'):
       resource = rate_node.parameterValues['resource']
       # linear coefficients are all order 1, no cross-variable multiplying
-      self._coefficients[(resource)][(1)] = rate_node.value
+      self._coefficients[tuple([resource])][ONE_TUP] = rate_node.value
     return []

   def evaluate(self, inputs, target_var=None, aliases=None):
diff --git a/src/ValuedParams/Polynomial.py b/src/ValuedParams/Polynomial.py
index 41de889b..1fd94ee8 100644
--- a/src/ValuedParams/Polynomial.py
+++ b/src/ValuedParams/Polynomial.py
@@ -7,6 +7,7 @@
   Primarily intended for transfer functions.
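+
+  As an illustration (hypothetical resources, not from the original docstring):
+  a transfer consuming 3 units of steam per unit of electricity produced can be
+  stored as {('steam',): {(1,): 1/3}, ('electricity',): {(1,): 1.0}}, and the
+  dispatcher constrains sum(coeff * activity**order) == 0, with consumed
+  resources carrying negative activity values.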
""" from collections import defaultdict +from pprint import pprint from .ValuedParam import ValuedParam, InputData, InputTypes @@ -46,7 +47,10 @@ def __init__(self): @ Out, None """ super().__init__() - self._coefficients = defaultdict(dict) # coeffs, stored as {(resources): {(order): coeff}} + # coeffs, stored as { (resources): { (order): coeff, }, } + # e.g., {(water, flour): {(1,1): 42, (1,2): 3.14}, + # (water): {(1): 1.61} } + self._coefficients = defaultdict(dict) def read(self, comp_name, spec, mode, alias_dict=None): """ diff --git a/src/dispatch/PyomoModelHandler.py b/src/dispatch/PyomoModelHandler.py index e64219e7..985afa56 100644 --- a/src/dispatch/PyomoModelHandler.py +++ b/src/dispatch/PyomoModelHandler.py @@ -140,6 +140,7 @@ def _create_production(self, comp): @ In, meta, dict, dictionary of state variables @ Out, None """ + print('DEBUGG creating production:', comp.name) prod_name = self._create_production_variable(comp) ## if you cannot set limits directly in the production variable, set separate contraint: ## Method 1: set variable bounds directly --> TODO more work needed, but would be nice @@ -326,42 +327,26 @@ def _create_transfer(self, comp, prod_name): Creates pyomo transfer function constraints @ In, m, pyo.ConcreteModel, associated model @ In, comp, HERON Component, component to make variables for + @ In, prod_name, str, production variable name @ Out, None """ - name = comp.name - # transfer functions - # e.g. 2A + 3B -> 1C + 2E - - #### OLD #### - # get linear coefficients - # TODO this could also take a transfer function from an external Python function assuming - # we're careful about how the expression-vs-float gets used - # and figure out how to handle multiple ins, multiple outs - - # ratios = putils.get_transfer_coeffs(self.model, comp) - # ref_r, ref_name, _ = ratios.pop('__reference', (None, None, None)) - # for resource, ratio in ratios.items(): - # r = self.model.resource_index_map[comp][resource] - # rule_name = f'{name}_{resource}_{ref_name}_transfer' - # rule = lambda mod, t: prl.transfer_rule(ratio, r, ref_r, prod_name, mod, t) - # constr = pyo.Constraint(self.model.T, rule=rule) - # setattr(self.model, rule_name, constr) - # XXX working here ... instead of making lots of little constraints, make one transfer constraint - # using the coefficients and polynomial orders + print('DEBUGG creating transfer:', comp.name) + rule_name = f'{comp.name}_transfer_func' transfer = comp.get_interaction().get_transfer() if transfer is None: - return {} - + print('DEBUGG skipping transfer:', comp.name) + return coeffs = transfer.get_coefficients() + print('DEBUGG coeffs:', coeffs) # dict of form {(r1, r2): {(o1, o2): n}} # where: # r1, r2 are resource names # o1, o2 are polynomial orders (may not be integers?) 
# n is the float polynomial coefficient for the term - rule_name = f'{name}_transfer_func' - rule = lambda mod, t: prl.transfer_rules(coeffs, m.resource_index_map[comp], prod_name, mod, t) - constr = pyo.Constraint(m.T, rule=rule) - setattr(m, rule_name, constr) + rule = lambda mod, t: prl.transfer_rule(coeffs, self.model.resource_index_map[comp], prod_name, mod, t) + constr = pyo.Constraint(self.model.T, rule=rule) + setattr(self.model, rule_name, constr) + def _create_storage(self, comp): """ diff --git a/src/dispatch/PyomoRuleLibrary.py b/src/dispatch/PyomoRuleLibrary.py index 93a4a203..30517983 100644 --- a/src/dispatch/PyomoRuleLibrary.py +++ b/src/dispatch/PyomoRuleLibrary.py @@ -298,6 +298,7 @@ def transfer_rule(coeffs, r_map, prod_name, m, t) -> bool: for orders, coeff in ord_dict.items(): term = coeff for i, res in enumerate(resources): + print(f'DEBUGG ... rsr {resources}, term {term}, i {i}, res {res}') r = r_map[res] prod = activity[r, t] term *= prod ** orders[i] diff --git a/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml b/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml index 64ad1fc0..ace2f91f 100644 --- a/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml +++ b/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml @@ -34,7 +34,7 @@ - + 1, 1000 @@ -45,6 +45,17 @@ + + + + 500 + + + + 2 + + + funding @@ -63,6 +74,28 @@ + + + funding, labor + + -100 + + + + + -0.9 + -1 + -0.01 + -0.2 + 1 + + + + + 2 + + + From ed2ab80cd2c282247c40b77359f209a0029923d7 Mon Sep 17 00:00:00 2001 From: talbpw Date: Wed, 3 Jan 2024 08:18:57 -0700 Subject: [PATCH 10/24] pre-rebase --- src/ValuedParams/Polynomial.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/ValuedParams/Polynomial.py b/src/ValuedParams/Polynomial.py index 1fd94ee8..fa114901 100644 --- a/src/ValuedParams/Polynomial.py +++ b/src/ValuedParams/Polynomial.py @@ -7,7 +7,6 @@ Primarily intended for transfer functions. 
""" from collections import defaultdict -from pprint import pprint from .ValuedParam import ValuedParam, InputData, InputTypes From 4d7af2c4e9f493502195f30d2669524fab25fe3e Mon Sep 17 00:00:00 2001 From: talbpw Date: Wed, 3 Jan 2024 08:19:32 -0700 Subject: [PATCH 11/24] more prerebase --- .../mechanics/transfer_funcs/heron_input.xml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml b/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml index ace2f91f..8dc4a2aa 100644 --- a/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml +++ b/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml @@ -64,8 +64,10 @@ - -1 - 0.25 + + + -4 + 1 @@ -82,10 +84,11 @@ - + + -0.9 -1 - -0.01 + -1e-6 -0.2 1 From 6940f48f1a81e9e1dc1f3825a52efd5aebef8b89 Mon Sep 17 00:00:00 2001 From: talbpw Date: Mon, 8 Jan 2024 08:01:01 -0700 Subject: [PATCH 12/24] pivot to handling sign conventions --- src/ValuedParams/Polynomial.py | 2 +- src/dispatch/PyomoModelHandler.py | 4 -- src/dispatch/PyomoRuleLibrary.py | 9 ++--- src/dispatch/putils.py | 3 ++ src/dispatch/pyomo_dispatch.py | 40 ++++++++++--------- .../mechanics/transfer_funcs/heron_input.xml | 33 ++++++++++----- 6 files changed, 51 insertions(+), 40 deletions(-) diff --git a/src/ValuedParams/Polynomial.py b/src/ValuedParams/Polynomial.py index fa114901..ac6aa805 100644 --- a/src/ValuedParams/Polynomial.py +++ b/src/ValuedParams/Polynomial.py @@ -32,7 +32,7 @@ def get_input_specs(cls): coeff.addParam('resource', param_type=InputTypes.StringListType, descr=r"""indicates the resource(s) for which the polynomial coefficient is being provided in this node. Note that the order of the resources matters for specifying the polynomial \xmlAttr{order}.""") - coeff.addParam('order', param_type=InputTypes.FloatListType, + coeff.addParam('order', param_type=InputTypes.IntegerListType, descr=r"""indicates the orders of the polynomial for each resource specified, in order. 
For example, if \xmlAttr{resources} is ``x, y'', then order ``2,3'' would mean the specified coefficient is for $x^{2}y^{3}$.""") diff --git a/src/dispatch/PyomoModelHandler.py b/src/dispatch/PyomoModelHandler.py index 985afa56..aa6635e8 100644 --- a/src/dispatch/PyomoModelHandler.py +++ b/src/dispatch/PyomoModelHandler.py @@ -140,7 +140,6 @@ def _create_production(self, comp): @ In, meta, dict, dictionary of state variables @ Out, None """ - print('DEBUGG creating production:', comp.name) prod_name = self._create_production_variable(comp) ## if you cannot set limits directly in the production variable, set separate contraint: ## Method 1: set variable bounds directly --> TODO more work needed, but would be nice @@ -330,14 +329,11 @@ def _create_transfer(self, comp, prod_name): @ In, prod_name, str, production variable name @ Out, None """ - print('DEBUGG creating transfer:', comp.name) rule_name = f'{comp.name}_transfer_func' transfer = comp.get_interaction().get_transfer() if transfer is None: - print('DEBUGG skipping transfer:', comp.name) return coeffs = transfer.get_coefficients() - print('DEBUGG coeffs:', coeffs) # dict of form {(r1, r2): {(o1, o2): n}} # where: # r1, r2 are resource names diff --git a/src/dispatch/PyomoRuleLibrary.py b/src/dispatch/PyomoRuleLibrary.py index 30517983..1096941a 100644 --- a/src/dispatch/PyomoRuleLibrary.py +++ b/src/dispatch/PyomoRuleLibrary.py @@ -297,10 +297,9 @@ def transfer_rule(coeffs, r_map, prod_name, m, t) -> bool: for resources, ord_dict in coeffs.items(): for orders, coeff in ord_dict.items(): term = coeff - for i, res in enumerate(resources): - print(f'DEBUGG ... rsr {resources}, term {term}, i {i}, res {res}') - r = r_map[res] - prod = activity[r, t] - term *= prod ** orders[i] + for r, res in enumerate(resources): + map_index = r_map[res] + prod = activity[map_index, t] + term *= prod ** orders[r] eqn += term return eqn == 0 diff --git a/src/dispatch/putils.py b/src/dispatch/putils.py index b07aec68..8c20a20b 100644 --- a/src/dispatch/putils.py +++ b/src/dispatch/putils.py @@ -2,6 +2,7 @@ """ import numpy as np import pyomo.environ as pyo +from pprint import pprint def get_all_resources(components): """ @@ -121,6 +122,8 @@ def debug_pyomo_print(m) -> None: @ Out, None """ print('/' + '='*80) + print('DEBUGG resource map:') + pprint(m.resource_index_map) print('DEBUGG model pieces:') print(' -> objective:') print(' ', m.obj.pprint()) diff --git a/src/dispatch/pyomo_dispatch.py b/src/dispatch/pyomo_dispatch.py index 44d7de51..d3c6c4dc 100644 --- a/src/dispatch/pyomo_dispatch.py +++ b/src/dispatch/pyomo_dispatch.py @@ -67,19 +67,19 @@ def get_input_specs(cls): """ specs = InputData.parameterInputFactory( 'pyomo', ordered=False, baseNode=None, - descr=r"""The \texttt{pyomo} dispatcher uses analytic modeling and rolling - windows to solve dispatch optimization with perfect information via the + descr=r"""The \texttt{pyomo} dispatcher uses analytic modeling and rolling + windows to solve dispatch optimization with perfect information via the pyomo optimization library.""" ) specs.addSub( InputData.parameterInputFactory( 'rolling_window_length', contentType=InputTypes.IntegerType, - descr=r"""Sets the length of the rolling window that the Pyomo optimization - algorithm uses to break down histories. Longer window lengths will minimize - boundary effects, such as nonoptimal storage dispatch, at the cost of slower - optimization solves. 
Note that if the rolling window results in a window - of length 1 (such as at the end of a history), this can cause problems for pyomo. + descr=r"""Sets the length of the rolling window that the Pyomo optimization + algorithm uses to break down histories. Longer window lengths will minimize + boundary effects, such as nonoptimal storage dispatch, at the cost of slower + optimization solves. Note that if the rolling window results in a window + of length 1 (such as at the end of a history), this can cause problems for pyomo. \default{24}""" ) ) @@ -87,7 +87,7 @@ def get_input_specs(cls): specs.addSub( InputData.parameterInputFactory( 'debug_mode', contentType=InputTypes.BoolType, - descr=r"""Enables additional printing in the pyomo dispatcher. + descr=r"""Enables additional printing in the pyomo dispatcher. Highly discouraged for production runs. \default{False}.""" ) ) @@ -95,7 +95,7 @@ def get_input_specs(cls): specs.addSub( InputData.parameterInputFactory( 'solver', contentType=InputTypes.StringType, - descr=r"""Indicates which solver should be used by pyomo. Options depend + descr=r"""Indicates which solver should be used by pyomo. Options depend on individual installation. \default{'glpk' for Windows, 'cbc' otherwise}.""" ) ) @@ -103,8 +103,8 @@ def get_input_specs(cls): specs.addSub( InputData.parameterInputFactory( 'tol', contentType=InputTypes.FloatType, - descr=r"""Relative tolerance for converging final optimal dispatch solutions. - Specific implementation depends on the solver selected. Changing this value + descr=r"""Relative tolerance for converging final optimal dispatch solutions. + Specific implementation depends on the solver selected. Changing this value could have significant impacts on the dispatch optimization time and quality. \default{solver dependent, often 1e-6}.""" ) @@ -139,7 +139,7 @@ def read_input(self, specs) -> None: window_len_node = specs.findFirst('rolling_window_length') if window_len_node is not None: self._window_len = window_len_node.value - + debug_node = specs.findFirst('debug_mode') if debug_node is not None: self.debug_mode = debug_node.value @@ -162,7 +162,7 @@ def read_input(self, specs) -> None: self.solve_options[key] = solver_tol else: raise ValueError(f"Tolerance setting not available for solver '{self._solver}'.") - + def _check_solver_availability(self, requested_solver: str) -> str: """ @@ -174,13 +174,13 @@ def _check_solver_availability(self, requested_solver: str) -> str: for solver in solvers_to_check: if self._is_solver_available(solver): return solver - + all_options = pyo.SolverFactory._cls.keys() available_solvers = [op for op in all_options if not op.startswith('_') and self._is_solver_available(op)] raise RuntimeError( f'Requested solver "{requested_solver} not found. Available options may include: {available_solvers}.' ) - + def _is_solver_available(self, solver: str) -> bool: """ @@ -249,7 +249,7 @@ def _get_initial_storage_levels(components: list, meta: dict, start_index: int) # NOTE: There used to be an else conditional here that depended on the # variable `subdisp` which was not defined yet. Leaving an unreachable # branch of code, thus, I removed it. So currently, this function assumes - # start_index will always be zero, otherwise it will return an empty dict. + # start_index will always be zero, otherwise it will return an empty dict. 
# Here was the line in case we need it in the future: # else: initial_levels[comp] = subdisp[comp.name]['level'][comp.get_interaction().get_resource()][-1] return initial_levels @@ -285,11 +285,11 @@ def _handle_dispatch_window_solve(self, specific_time, start_index, case, compon if conv_counter >= self._picard_limit and not converged: raise DispatchError(f"Convergence not reached after {self._picard_limit} iterations.") - + else: # No convergence process needed pass - + end = time_mod.time() solve_time = end - start return subdisp, solve_time @@ -324,7 +324,7 @@ def check_converged(self, new, old, components, tol=1e-4): """ if old is None: return False - + for comp in components: intr = comp.get_interaction() if intr.is_governed(): # by "is_governed" we mean "isn't optimized in pyomo" @@ -508,6 +508,8 @@ def _solve_dispatch(self, m, meta): if self.debug_mode: soln.write() putils.debug_print_soln(m) + else: + print('DEBUGG debug mode is turned off!') # return dict of numpy arrays result = putils.retrieve_solution(m) return result diff --git a/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml b/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml index 8dc4a2aa..97f1bc72 100644 --- a/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml +++ b/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml @@ -29,7 +29,10 @@ 50 - + + ipopt + True + @@ -45,7 +48,7 @@ - + @@ -66,7 +69,7 @@ - -4 + 4 1 @@ -76,7 +79,10 @@ - + + + + - -0.9 -1 -1e-6 - -0.2 1 @@ -97,12 +100,12 @@ 2 - + --> - -1e3 + -6e3 @@ -121,11 +124,19 @@ - -1e4 + -150 3 + + + funding + + + 1 + + From 5533dd7135a1630c69e06a9648a03b54909d6132 Mon Sep 17 00:00:00 2001 From: talbpw Date: Wed, 24 Jan 2024 13:05:52 -0700 Subject: [PATCH 13/24] stash before rebase --- src/Components.py | 290 +++++++++++++++++++++----------- src/ValuedParamHandler.py | 8 + src/ValuedParams/Linear.py | 6 +- src/ValuedParams/Parametric.py | 37 ++++ src/ValuedParams/ValuedParam.py | 9 + 5 files changed, 248 insertions(+), 102 deletions(-) diff --git a/src/Components.py b/src/Components.py index ed244b02..7250743f 100644 --- a/src/Components.py +++ b/src/Components.py @@ -33,6 +33,7 @@ def factory(xml, method='sweep'): """ comp = Component() comp.read_input(xml, method) + comp.finalize_init() return comp class Component(Base, CashFlowUser): @@ -130,6 +131,14 @@ def read_input(self, xml, mode): self.raiseAnError(IOError, f' node missing from component "{self.name}"!') CashFlowUser.read_input(self, econ_node) + def finalize_init(self): + """ + Finalizes the initialization of the component, and checks input settings. + @ In, None + @ Out, None + """ + self.get_interaction().finalize_init() + def get_crossrefs(self): """ Collect the required value entities needed for this component to function. @@ -282,60 +291,61 @@ def set_capacity(self, cap): """ return self.get_interaction().set_capacity(cap) - def produce(self, request, meta, raven_variables, dispatch, t, level=None): - """ - Enacts the transfer function for this component to act based on a request. - FIXME was used for "generic" dispatcher, does it still apply? - @ In, request, dict, mapping of requested resource usage to amount requested (negative is - consume, positive is produce) - @ In, meta, dict, metadata information for current status in run - @ In, raven_variables, dict, variables from RAVEN TODO part of meta! 
- @ In, dispatch, DispatchState, expression of the current activity levels in the system - @ In, t, int, index of "time" at which this production should be performed - @ In, level, float, for storages indicates the amount currently stored - @ Out, balance, dict, full dict of resources used and produced for request - @ Out, meta, dict, updated metadata dictionary - """ - #balance = defaultdict(float) - interaction = self.get_interaction() - balance, meta = interaction.produce(request, meta, raven_variables, dispatch, t, level) - #for resource, quantity in int_balance.items(): - # balance[resource] += quantity - return balance, meta - - def produce_max(self, meta, raven_variables, dispatch, t): - """ - Determines the maximum production possible for this component. - @ In, meta, dict, metadata information for current status in run - @ In, raven_variables, dict, variables from RAVEN TODO part of meta! - @ In, dispatch, DispatchState, expression of the current activity levels in the system - @ In, t, int, index of "time" at which this production should be performed - @ Out, balance, dict, full dict of resources used and produced for request - @ Out, meta, dict, updated metadata dictionary - """ - #balance = defaultdict(float) - interaction = self.get_interaction() - balance, meta = interaction.produce_max(meta, raven_variables, dispatch, t) - #for resource, quantity in int_balance.items(): - # balance[resource] += quantity - return balance, meta - - def produce_min(self, meta, raven_variables, dispatch, t): - """ - Determines the minimum production possible for this component. - @ In, meta, dict, metadata information for current status in run - @ In, raven_variables, dict, variables from RAVEN TODO part of meta! - @ In, dispatch, DispatchState, expression of the current activity levels in the system - @ In, t, int, index of "time" at which this production should be performed - @ Out, balance, dict, full dict of resources used and produced for request - @ Out, meta, dict, updated metadata dictionary - """ - #balance = defaultdict(float) - interaction = self.get_interaction() - balance, meta = interaction.produce_min(meta, raven_variables, dispatch, t) - #for resource, quantity in int_balance.items(): - # balance[resource] += quantity - return balance, meta + # FIXME can this be removed? + # def produce(self, request, meta, raven_variables, dispatch, t, level=None): + # """ + # Enacts the transfer function for this component to act based on a request. + # FIXME was used for "generic" dispatcher, does it still apply? + # @ In, request, dict, mapping of requested resource usage to amount requested (negative is + # consume, positive is produce) + # @ In, meta, dict, metadata information for current status in run + # @ In, raven_variables, dict, variables from RAVEN TODO part of meta! 
+ # @ In, dispatch, DispatchState, expression of the current activity levels in the system + # @ In, t, int, index of "time" at which this production should be performed + # @ In, level, float, for storages indicates the amount currently stored + # @ Out, balance, dict, full dict of resources used and produced for request + # @ Out, meta, dict, updated metadata dictionary + # """ + # #balance = defaultdict(float) + # interaction = self.get_interaction() + # balance, meta = interaction.produce(request, meta, raven_variables, dispatch, t, level) + # #for resource, quantity in int_balance.items(): + # # balance[resource] += quantity + # return balance, meta + + # def produce_max(self, meta, raven_variables, dispatch, t): + # """ + # Determines the maximum production possible for this component. + # @ In, meta, dict, metadata information for current status in run + # @ In, raven_variables, dict, variables from RAVEN TODO part of meta! + # @ In, dispatch, DispatchState, expression of the current activity levels in the system + # @ In, t, int, index of "time" at which this production should be performed + # @ Out, balance, dict, full dict of resources used and produced for request + # @ Out, meta, dict, updated metadata dictionary + # """ + # #balance = defaultdict(float) + # interaction = self.get_interaction() + # balance, meta = interaction.produce_max(meta, raven_variables, dispatch, t) + # #for resource, quantity in int_balance.items(): + # # balance[resource] += quantity + # return balance, meta + + # def produce_min(self, meta, raven_variables, dispatch, t): + # """ + # Determines the minimum production possible for this component. + # @ In, meta, dict, metadata information for current status in run + # @ In, raven_variables, dict, variables from RAVEN TODO part of meta! + # @ In, dispatch, DispatchState, expression of the current activity levels in the system + # @ In, t, int, index of "time" at which this production should be performed + # @ Out, balance, dict, full dict of resources used and produced for request + # @ Out, meta, dict, updated metadata dictionary + # """ + # #balance = defaultdict(float) + # interaction = self.get_interaction() + # balance, meta = interaction.produce_min(meta, raven_variables, dispatch, t) + # #for resource, quantity in int_balance.items(): + # # balance[resource] += quantity + # return balance, meta @property def ramp_limit(self): @@ -512,6 +522,37 @@ def _set_valued_param(self, name, comp, spec, mode): self._crossrefs[name] = vp setattr(self, name, vp) + def finalize_init(self): + """ + Post-input reading final initialization. + @ In, None + @ Out, None + """ + # fix up signs: send sign map to valued params + var_map = self.map_vp_signs() + self.set_vp_signs(var_map) + + def map_vp_signs(self): + """ + Collect the signs of VPs based on the input/output of this component + @ In, None + @ Out, var_map, dict, map of variables to their correct sign + """ + var_map = {'negative': set(), 'positive': set()} + return var_map + + def set_vp_signs(self, var_map: dict): + """ + Set the signs of VPs based on the input/output of this component + @ In, var_map, dict, map of variables to their correct sign + @ Out, None + """ + # this list is bespoke + # TODO is there an automated way to come up with valued params that need editing? + self._capacity.set_signs(var_map, to_set=[self.get_capacity_var]) + if self._minimum is not None: + self._minimum.set_signs(var_map) + def get_capacity(self, meta, raw=False): """ Returns the capacity of this interaction. 
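  # For illustration (hypothetical resources; a sketch rather than part of this
  # class): the sign conventions collected above behave like the following.
  #   var_map = {'negative': {'water'}, 'positive': {'electricity'}}
  #   def apply_sign(value, resource):
  #     # magnitude comes from the input, sign comes from the activity map
  #     sgn = -1 if resource in var_map['negative'] else 1
  #     return sgn * abs(value)
  #   apply_sign(-5.0, 'electricity')  # -> 5.0, produced resources forced positive
  #   apply_sign(3.0, 'water')         # -> -3.0, consumed resources forced negative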
@@ -805,6 +846,29 @@ def read_input(self, specs, mode, comp_name): if self.ramp_limit is not None and not 0 < self.ramp_limit <= 1: self.raiseAnError(IOError, f'Ramp limit must be (0, 1] but got "{self.ramp_limit}"') + def map_vp_signs(self): + """ + Collect the signs of VPs based on the input/output of this component + @ In, None + @ Out, var_map, dict, map of variables to their correct sign + """ + var_map = super().map_vp_signs() + # FIXME speed this up when we show it works + for consumed in self.get_inputs(): + var_map['negative'].add(consumed) + for produced in self.get_outputs(): + var_map['produced'].add(produced) + return var_map + + def set_vp_signs(self, var_map: dict): + """ + Set the signs of VPs based on the input/output of this component + @ In, var_map, dict, map of variables to their correct sign + @ Out, None + """ + super().set_vp_signs(var_map) + self._transfer.set_signs(var_map) + self.ramp_limit.set_signs(var_map) def get_inputs(self): """ @@ -825,7 +889,7 @@ def get_outputs(self): outputs = set(np.atleast_1d(self._produces)) return outputs - def print_me(self, tabs=0, tab=' '): + def print_me(self, tabs: int=0, tab: str=' ') -> None: """ Prints info about self @ In, tabs, int, optional, number of tabs to insert before prints @@ -839,45 +903,46 @@ def print_me(self, tabs=0, tab=' '): self.raiseADebug(pre+' transfer:', self._transfer) self.raiseADebug(pre+' capacity:', self._capacity) - def transfer(self, request, meta, raven_vars, dispatch, t): - """ - Use the transfer function to make a balance of activities that should occur - @ In, request, dict, requested action {resource: amount} - @ In, meta, dict, additional variables to pass through - @ In, raven_vars, dict, TODO part of meta! consolidate! - @ In, dispatch, dict, TODO part of meta! consolidate! - @ In, t, int, TODO part of meta! consolidate! - @ Out, balance, dict, results of requested action - @ Out, meta, dict, additional variable passthrough - """ - assert len(request) == 1 - balance = defaultdict(float) - # in the rare case that the transfer function is simple ... - resources_in = list(self.get_inputs()) - resources_out = list(self.get_outputs()) - inputs = meta - inputs['request'] = request - inputs['t'] = t - inputs['dispatch'] = dispatch - balance, meta = self._transfer.evaluate(inputs) - self.check_expected_present(balance, self.get_resources(), f'TRANSFER FUNCTION {self._transfer}') - # OLD if transfer evaluation is a float (float, arma), then it signifies a conversion rate - ## note that we've checked in the input reading for this singular relationship - - if False: #len(balance) == 1: - requested, rate = list(balance.items())[0] # requested resource and the transfer rate (amount of product per consumed) - amount = list(requested.values())[0] # amount of requested resource - if requested in resources_in: - balance[resources_out[0]] = -1.0 * rate * amount # NOTE: amount should be negative, but output should be positive - else: - balance[inputs[0]] = -1.0 / rate * amount # NOTE: amount should be positive, but input should be negative - # check that all values got filled -> TODO remove this for opt performance - missing = set(resources_in + resources_out) - set(balance.keys()) - if missing: - self.raiseAnError(RuntimeError, 'While evaluating transfer function, not all variables requested were provided!' +\ - f' Missing: {missing}' +\ - f' Transfer function: {self._transfer}') - return balance, meta + # TODO can this be removed? 
+ # def transfer(self, request, meta, raven_vars, dispatch, t): + # """ + # Use the transfer function to make a balance of activities that should occur + # @ In, request, dict, requested action {resource: amount} + # @ In, meta, dict, additional variables to pass through + # @ In, raven_vars, dict, TODO part of meta! consolidate! + # @ In, dispatch, dict, TODO part of meta! consolidate! + # @ In, t, int, TODO part of meta! consolidate! + # @ Out, balance, dict, results of requested action + # @ Out, meta, dict, additional variable passthrough + # """ + # assert len(request) == 1 + # balance = defaultdict(float) + # # in the rare case that the transfer function is simple ... + # resources_in = list(self.get_inputs()) + # resources_out = list(self.get_outputs()) + # inputs = meta + # inputs['request'] = request + # inputs['t'] = t + # inputs['dispatch'] = dispatch + # balance, meta = self._transfer.evaluate(inputs) + # self.check_expected_present(balance, self.get_resources(), f'TRANSFER FUNCTION {self._transfer}') + # # OLD if transfer evaluation is a float (float, arma), then it signifies a conversion rate + # ## note that we've checked in the input reading for this singular relationship + + # if False: #len(balance) == 1: + # requested, rate = list(balance.items())[0] # requested resource and the transfer rate (amount of product per consumed) + # amount = list(requested.values())[0] # amount of requested resource + # if requested in resources_in: + # balance[resources_out[0]] = -1.0 * rate * amount # NOTE: amount should be negative, but output should be positive + # else: + # balance[inputs[0]] = -1.0 / rate * amount # NOTE: amount should be positive, but input should be negative + # # check that all values got filled -> TODO remove this for opt performance + # missing = set(resources_in + resources_out) - set(balance.keys()) + # if missing: + # self.raiseAnError(RuntimeError, 'While evaluating transfer function, not all variables requested were provided!' +\ + # f' Missing: {missing}' +\ + # f' Transfer function: {self._transfer}') + # return balance, meta @@ -965,6 +1030,28 @@ def read_input(self, specs, mode, comp_name): # the capacity is limited by the stored resource. self._capacity_var = self._stores + def map_vp_signs(self): + """ + Collect the signs of VPs based on the input/output of this component + @ In, None + @ Out, var_map, dict, map of variables to their correct sign + """ + var_map = super().map_vp_signs() + for stores in self.get_ouputs(): + var_map['positive'].add(stores) + return var_map + + def set_vp_signs(self, var_map: dict): + """ + Set the signs of VPs based on the input/output of this component + @ In, var_map, dict, map of variables to their correct sign + @ Out, None + """ + super().set_vp_signs(var_map) + self._rate.set_signs(var_map) + self._initial_stored.set_signs(var_map) + # self._strategy? + def get_inputs(self): """ Returns the set of resources that are inputs to this interaction. 
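  # For illustration (hypothetical component layout): a producer consuming 'water'
  # to make 'electricity' plus a storage of 'electricity' would be expected to give
  #   producer.map_vp_signs() -> {'negative': {'water'}, 'positive': {'electricity'}}
  #   storage.map_vp_signs()  -> {'negative': set(), 'positive': {'electricity'}}
  # so a storage's stored resource always carries the positive sign convention.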
@@ -1124,7 +1211,6 @@ def __init__(self, **kwargs): """ Interaction.__init__(self, **kwargs) self._demands = None # the resource demanded by this interaction - self._penalty = None # how to penalize for not meeting demand NOT IMPLEMENTED self._tracking_vars = ['production'] def read_input(self, specs, mode, comp_name): @@ -1139,9 +1225,17 @@ def read_input(self, specs, mode, comp_name): # must set demands first, so that "capacity" can access it self._demands = specs.parameterValues['resource'] Interaction.read_input(self, specs, mode, comp_name) - for item in specs.subparts: - if item.getName() == 'penalty': - self._set_valued_param('_rate', comp_name, item, mode) + + def map_vp_signs(self): + """ + Collect the signs of VPs based on the input/output of this component + @ In, None + @ Out, var_map, dict, map of variables to their correct sign + """ + var_map = super().map_vp_signs() + for demand in self.get_inputs(): + var_map['negative'].add(demand) + return var_map def get_inputs(self): """ diff --git a/src/ValuedParamHandler.py b/src/ValuedParamHandler.py index 1e30aa63..bf4c1978 100644 --- a/src/ValuedParamHandler.py +++ b/src/ValuedParamHandler.py @@ -129,6 +129,14 @@ def crosscheck(self, interaction): """ self._vp.crosscheck(interaction) + def set_signs(self, var_map: dict, to_set: list=None) -> None: + """ + Set the signs of the parameters for this VP based on provided conventions. + @ In, var_map, dict, lists of variables under 'positive' or 'negative' keys + @ Out, None + """ + self._vp.set_signs(var_map, to_set=to_set) + def get_source(self): """ Access the source type and name of the VP diff --git a/src/ValuedParams/Linear.py b/src/ValuedParams/Linear.py index 61cb58ea..fd69b154 100644 --- a/src/ValuedParams/Linear.py +++ b/src/ValuedParams/Linear.py @@ -22,9 +22,6 @@ def get_input_specs(cls): @ In, None @ Out, spec, InputData, value-based spec """ - # TODO should this be reconciled with Polynomial? 
-    # Pro: maintainability
-    # Con: degraded user experience
     spec = InputData.parameterInputFactory('linear', contentType=InputTypes.StringType,
         descr=r"""indicates this value should be interpreted as a ratio based on an input value.""")
     rate = InputData.parameterInputFactory('rate', contentType=InputTypes.FloatType,
@@ -55,7 +52,8 @@ def read(self, comp_name, spec, mode, alias_dict=None):
     for rate_node in spec.findAll('rate'):
       resource = rate_node.parameterValues['resource']
       # linear coefficients are all order 1, no cross-variable multiplying
-      self._coefficients[tuple([resource])][ONE_TUP] = rate_node.value
+      # coefficient signs come from activity, not coeffs, so fix that up too
+      self._coefficients[tuple([resource])][ONE_TUP] = abs(rate_node.value)
     return []

   def evaluate(self, inputs, target_var=None, aliases=None):
diff --git a/src/ValuedParams/Parametric.py b/src/ValuedParams/Parametric.py
index 142026ec..11019846 100644
--- a/src/ValuedParams/Parametric.py
+++ b/src/ValuedParams/Parametric.py
@@ -55,6 +55,24 @@ def read(self, comp_name, spec, mode, alias_dict=None):
     self._debug_value = spec.parameterValues.get('debug_value', None)
     return []

+  def set_signs(self, var_map: dict, to_set: list) -> None:
+    """
+      Sets the signs of stored values for this element according to the map given
+      @ In, var_map, dict, mapping of negative and positive vars
+      @ In, to_set, list, variable names to set
+      @ Out, None
+    """
+    if len(to_set) > 1:
+      raise RuntimeError(f'Received multiple "to_set" values ({to_set}) in setting signs for {self.__class__.__name__}!')
+    res = to_set[0]
+    if res in var_map['positive']:
+      sgn = 1
+    elif res in var_map['negative']:
+      sgn = -1
+    # this works for lists of values; override for floats
+    for v, value in enumerate(self._parametric):
+      self._parametric[v] = sgn * value
+
   def get_value(self, debug=False):
     """
       Get the value for this parametric source.
@@ -90,6 +108,7 @@ def evaluate(self, inputs, target_var=None, aliases=None):
 ######
 # dummy classes, just for changing descriptions, but they act the same as parametric
 class FixedValue(Parametric):
+  """ For inputs that have a singular fixed value. """
   @classmethod
   def get_input_specs(cls):
     """
@@ -104,7 +123,24 @@ def get_input_specs(cls):
       and act as a constant in the inner workflow."""
     return spec

+  def set_signs(self, var_map: dict, to_set: list) -> None:
+    """
+      Sets the signs of stored values for this element according to the map given
+      @ In, var_map, dict, mapping of negative and positive vars
+      @ In, to_set, list, variable names to set
+      @ Out, None
+    """
+    if len(to_set) > 1:
+      raise RuntimeError(f'Received multiple "to_set" values ({to_set}) in setting signs for {self.__class__.__name__}!')
+    res = to_set[0]
+    if res in var_map['positive']:
+      sgn = 1
+    elif res in var_map['negative']:
+      sgn = -1
+    self._parametric = sgn * self._parametric
+
 class OptBounds(Parametric):
+  """ For inputs that are optimized between lower, upper bounds. """
   @classmethod
   def get_input_specs(cls):
     """
@@ -120,6 +156,7 @@ def get_input_specs(cls):
     return spec

 class SweepValues(Parametric):
+  """ For inputs that are parametrically evaluated at several points.
""" @classmethod def get_input_specs(cls): """ diff --git a/src/ValuedParams/ValuedParam.py b/src/ValuedParams/ValuedParam.py index 051f6c93..99a9980c 100644 --- a/src/ValuedParams/ValuedParam.py +++ b/src/ValuedParams/ValuedParam.py @@ -106,6 +106,15 @@ def set_object(self, obj): """ self._target_obj = obj + def set_signs(self, var_map: dict, to_set: list) -> None: + """ + Sets the signs of elements of this ValuedParam based on keywords. + @ In, var_map, dict, maps of positive and negative signed keywords + @ In, to_set, list, list of variables to set signs for + @ Out, None + """ + # nothing to do by default + def evaluate(self, inputs, target_var=None, aliases=None): """ Evaluate this ValuedParam, wherever it gets its data from From e41a2f62c9b9f656aceb38167b5ac7626b9e2f79 Mon Sep 17 00:00:00 2001 From: talbpw Date: Wed, 24 Jan 2024 13:21:50 -0700 Subject: [PATCH 14/24] more mergefix --- src/dispatch/PyomoModelHandler.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/dispatch/PyomoModelHandler.py b/src/dispatch/PyomoModelHandler.py index 06ad59d4..adfbb129 100644 --- a/src/dispatch/PyomoModelHandler.py +++ b/src/dispatch/PyomoModelHandler.py @@ -370,6 +370,7 @@ def _create_transfer(self, comp, prod_name): # TODO this could also take a transfer function from an external Python function assuming # we're careful about how the expression-vs-float gets used # and figure out how to handle multiple ins, multiple outs + # XXX revert this to coefficients! ratios = putils.get_transfer_coeffs(self.model, comp) ref_r, ref_name, _ = ratios.pop('__reference', (None, None, None)) for resource, ratio in ratios.items(): From fdba9a02ff1c2e85c3a6d24f9f6b14197929b9de Mon Sep 17 00:00:00 2001 From: talbpw Date: Wed, 24 Jan 2024 13:23:35 -0700 Subject: [PATCH 15/24] restored coeff tx for PMH --- src/dispatch/PyomoModelHandler.py | 29 +++++++++++++---------------- 1 file changed, 13 insertions(+), 16 deletions(-) diff --git a/src/dispatch/PyomoModelHandler.py b/src/dispatch/PyomoModelHandler.py index adfbb129..0ec3820a 100644 --- a/src/dispatch/PyomoModelHandler.py +++ b/src/dispatch/PyomoModelHandler.py @@ -363,22 +363,19 @@ def _create_transfer(self, comp, prod_name): @ In, prod_name, str, name of production variable @ Out, None """ - name = comp.name - # transfer functions - # e.g. 2A + 3B -> 1C + 2E - # get linear coefficients - # TODO this could also take a transfer function from an external Python function assuming - # we're careful about how the expression-vs-float gets used - # and figure out how to handle multiple ins, multiple outs - # XXX revert this to coefficients! - ratios = putils.get_transfer_coeffs(self.model, comp) - ref_r, ref_name, _ = ratios.pop('__reference', (None, None, None)) - for resource, ratio in ratios.items(): - r = self.model.resource_index_map[comp][resource] - rule_name = f'{name}_{resource}_{ref_name}_transfer' - rule = lambda mod, t: prl.transfer_rule(ratio, r, ref_r, prod_name, mod, t) - constr = pyo.Constraint(self.model.T, rule=rule) - setattr(self.model, rule_name, constr) + rule_name = f'{comp.name}_transfer_func' + transfer = comp.get_interaction().get_transfer() + if transfer is None: + return + coeffs = transfer.get_coefficients() + # dict of form {(r1, r2): {(o1, o2): n}} + # where: + # r1, r2 are resource names + # o1, o2 are polynomial orders (may not be integers?) 
+ # n is the float polynomial coefficient for the term + rule = lambda mod, t: prl.transfer_rule(coeffs, self.model.resource_index_map[comp], prod_name, mod, t) + constr = pyo.Constraint(self.model.T, rule=rule) + setattr(self.model, rule_name, constr) def _create_storage(self, comp): From 3b1edcfba97a258be97081c31d45e5706cf02523 Mon Sep 17 00:00:00 2001 From: talbpw Date: Wed, 31 Jan 2024 09:09:31 -0700 Subject: [PATCH 16/24] linear tx is running after tinkering with signs --- src/Components.py | 100 +-------------------- tests/unit_tests/test_PyomoModelHandler.py | 71 --------------- 2 files changed, 1 insertion(+), 170 deletions(-) delete mode 100644 tests/unit_tests/test_PyomoModelHandler.py diff --git a/src/Components.py b/src/Components.py index 7250743f..f83a5ec3 100644 --- a/src/Components.py +++ b/src/Components.py @@ -291,62 +291,6 @@ def set_capacity(self, cap): """ return self.get_interaction().set_capacity(cap) - # FIXME can this be removed? - # def produce(self, request, meta, raven_variables, dispatch, t, level=None): - # """ - # Enacts the transfer function for this component to act based on a request. - # FIXME was used for "generic" dispatcher, does it still apply? - # @ In, request, dict, mapping of requested resource usage to amount requested (negative is - # consume, positive is produce) - # @ In, meta, dict, metadata information for current status in run - # @ In, raven_variables, dict, variables from RAVEN TODO part of meta! - # @ In, dispatch, DispatchState, expression of the current activity levels in the system - # @ In, t, int, index of "time" at which this production should be performed - # @ In, level, float, for storages indicates the amount currently stored - # @ Out, balance, dict, full dict of resources used and produced for request - # @ Out, meta, dict, updated metadata dictionary - # """ - # #balance = defaultdict(float) - # interaction = self.get_interaction() - # balance, meta = interaction.produce(request, meta, raven_variables, dispatch, t, level) - # #for resource, quantity in int_balance.items(): - # # balance[resource] += quantity - # return balance, meta - - # def produce_max(self, meta, raven_variables, dispatch, t): - # """ - # Determines the maximum production possible for this component. - # @ In, meta, dict, metadata information for current status in run - # @ In, raven_variables, dict, variables from RAVEN TODO part of meta! - # @ In, dispatch, DispatchState, expression of the current activity levels in the system - # @ In, t, int, index of "time" at which this production should be performed - # @ Out, balance, dict, full dict of resources used and produced for request - # @ Out, meta, dict, updated metadata dictionary - # """ - # #balance = defaultdict(float) - # interaction = self.get_interaction() - # balance, meta = interaction.produce_max(meta, raven_variables, dispatch, t) - # #for resource, quantity in int_balance.items(): - # # balance[resource] += quantity - # return balance, meta - - # def produce_min(self, meta, raven_variables, dispatch, t): - # """ - # Determines the minimum production possible for this component. - # @ In, meta, dict, metadata information for current status in run - # @ In, raven_variables, dict, variables from RAVEN TODO part of meta! 
- # @ In, dispatch, DispatchState, expression of the current activity levels in the system - # @ In, t, int, index of "time" at which this production should be performed - # @ Out, balance, dict, full dict of resources used and produced for request - # @ Out, meta, dict, updated metadata dictionary - # """ - # #balance = defaultdict(float) - # interaction = self.get_interaction() - # balance, meta = interaction.produce_min(meta, raven_variables, dispatch, t) - # #for resource, quantity in int_balance.items(): - # # balance[resource] += quantity - # return balance, meta - @property def ramp_limit(self): """ @@ -903,47 +847,6 @@ def print_me(self, tabs: int=0, tab: str=' ') -> None: self.raiseADebug(pre+' transfer:', self._transfer) self.raiseADebug(pre+' capacity:', self._capacity) - # TODO can this be removed? - # def transfer(self, request, meta, raven_vars, dispatch, t): - # """ - # Use the transfer function to make a balance of activities that should occur - # @ In, request, dict, requested action {resource: amount} - # @ In, meta, dict, additional variables to pass through - # @ In, raven_vars, dict, TODO part of meta! consolidate! - # @ In, dispatch, dict, TODO part of meta! consolidate! - # @ In, t, int, TODO part of meta! consolidate! - # @ Out, balance, dict, results of requested action - # @ Out, meta, dict, additional variable passthrough - # """ - # assert len(request) == 1 - # balance = defaultdict(float) - # # in the rare case that the transfer function is simple ... - # resources_in = list(self.get_inputs()) - # resources_out = list(self.get_outputs()) - # inputs = meta - # inputs['request'] = request - # inputs['t'] = t - # inputs['dispatch'] = dispatch - # balance, meta = self._transfer.evaluate(inputs) - # self.check_expected_present(balance, self.get_resources(), f'TRANSFER FUNCTION {self._transfer}') - # # OLD if transfer evaluation is a float (float, arma), then it signifies a conversion rate - # ## note that we've checked in the input reading for this singular relationship - - # if False: #len(balance) == 1: - # requested, rate = list(balance.items())[0] # requested resource and the transfer rate (amount of product per consumed) - # amount = list(requested.values())[0] # amount of requested resource - # if requested in resources_in: - # balance[resources_out[0]] = -1.0 * rate * amount # NOTE: amount should be negative, but output should be positive - # else: - # balance[inputs[0]] = -1.0 / rate * amount # NOTE: amount should be positive, but input should be negative - # # check that all values got filled -> TODO remove this for opt performance - # missing = set(resources_in + resources_out) - set(balance.keys()) - # if missing: - # self.raiseAnError(RuntimeError, 'While evaluating transfer function, not all variables requested were provided!' 
+\ - # f' Missing: {missing}' +\ - # f' Transfer function: {self._transfer}') - # return balance, meta - class Storage(Interaction): @@ -1247,7 +1150,7 @@ def get_inputs(self): inputs.update(np.atleast_1d(self._demands)) return inputs - def print_me(self, tabs=0, tab=' '): + def print_me(self, tabs: int=0, tab: str=' ') -> None: """ Prints info about self @ In, tabs, int, optional, number of tabs to insert before prints @@ -1257,5 +1160,4 @@ def print_me(self, tabs=0, tab=' '): pre = tab*tabs self.raiseADebug(pre+'Demand/Load:') self.raiseADebug(pre+' demands:', self._demands) - self.raiseADebug(pre+' penalty:', self._penalty) self.raiseADebug(pre+' capacity:', self._capacity) diff --git a/tests/unit_tests/test_PyomoModelHandler.py b/tests/unit_tests/test_PyomoModelHandler.py deleted file mode 100644 index e464db8e..00000000 --- a/tests/unit_tests/test_PyomoModelHandler.py +++ /dev/null @@ -1,71 +0,0 @@ -''' - Test specific aspects of HERON Pyomo dispatcher's PyomoModeHandler -''' - -import os -import sys -import pytest -import xml.etree.ElementTree as ET - -# Load HERON tools -HERON_LOC = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir)) -sys.path.append(HERON_LOC) -from HERON.src import Components -from HERON.src import _utils as hutils -from HERON.src.ValuedParamHandler import ValuedParamHandler as VPH -from HERON.src.ValuedParams import factory as VPfactory -from HERON.src.dispatch.PyomoModelHandler import PyomoModelHandler as PMH -pmh = PMH(0, 0, None, None, None, None, None) -sys.path.pop() - -import pyomo.environ as pyo -try: - import ravenframework -except ModuleNotFoundError: - # Load RAVEN tools - sys.path.append(hutils.get_raven_loc()) -#from ravenframework.utils import InputData, xmlUtils,InputTypes -import ravenframework.MessageHandler as MessageHandler - - -@pytest.fixture -def component(): - transfer = VPfactory.returnInstance('poly') - # encode af^2 + bf + cf^0.2 + k_1 = dw + k_2 - # becomes af^2 + bf + cf^0.2 + k - dw = 0 - # divide out d for new a, b, c, k - # af^2 + bf + cf^0.2 + k - w = 0 - # where: - # f is "funding" in [0,400], - # w is "work" in ~ 37 when x~14.5 - # a, b, c, d, are all scalar coefficients: - # a = 1e-3 - # b = -0.5 - # c = 20 - # k = 10 - coeffs = { - ('funding'): {(2): 1e-3, (1): -0.5, (0.2): 20, (0): 10} - } - transfer._coefficients = coeffs - comp = Components.Producer() - comp.name = 'postdoc' - comp.messageHandler = MessageHandler.MessageHandler() - comp.messageHandler.verbosity = 'debug' - comp._capacity_var = 'funding' - comp._capacity = VPH('researcher_capacity') - comp._capacity.set_const_VP(0) - comp.set_capacity(400) - comp._transfer = transfer - yield comp - -class TestHeronPMH: - - def test_prod_tracker(self, component): - assert component.get_tracking_vars() == ['production'] - - def test_transfer(self, component): - m = pyo.ConcreteModel() - pmh._create_transfer(m, component) - print(m) - assert False - From 73beec446f0322b39be563ab8d5500c5674f48d1 Mon Sep 17 00:00:00 2001 From: talbpw Date: Wed, 14 Feb 2024 12:18:07 -0700 Subject: [PATCH 17/24] stash, need to split linear and poly in to ratio and poly, no relation --- src/ValuedParams/Linear.py | 21 +++++++++++++++++++++ src/ValuedParams/Parametric.py | 22 +++------------------- src/main.py | 1 - 3 files changed, 24 insertions(+), 20 deletions(-) diff --git a/src/ValuedParams/Linear.py b/src/ValuedParams/Linear.py index fd69b154..5a6d1277 100644 --- a/src/ValuedParams/Linear.py +++ b/src/ValuedParams/Linear.py @@ -5,6 +5,8 @@ Values that are 
expressed as linear ratios of one another. Primarily intended for transfer functions. """ +import numpy as np + from .ValuedParam import ValuedParam, InputData, InputTypes from .Polynomial import Polynomial @@ -56,6 +58,25 @@ def read(self, comp_name, spec, mode, alias_dict=None): self._coefficients[tuple([resource])][ONE_TUP] = abs(rate_node.value) return [] + def set_signs(self, var_map: dict, to_set: list) -> None: + """ + Sets the signs of stored values for this element according to the map given + @ In, var_map, dict, mapping of negative and positive vars + @ In, to_set, list, variable names to set + @ Out, None + """ + aaaaa + for resources, orders_dict in self._coefficients.items(): + res = resources[0] + val = orders_dict[ONE_TUP] + print(f'DEBUGG res: "{res}", val {val}') + if res in var_map['positive']: + sgn = 1 + elif res in var_map['negative']: + sgn = -1 + print(f'DEBUGG res: "{res}", val {val}, sgn: {sgn}') + orders_dict[ONE_TUP] = sgn * np.abs(val) + def evaluate(self, inputs, target_var=None, aliases=None): """ Evaluate this ValuedParam, wherever it gets its data from diff --git a/src/ValuedParams/Parametric.py b/src/ValuedParams/Parametric.py index 3c239893..61923e23 100644 --- a/src/ValuedParams/Parametric.py +++ b/src/ValuedParams/Parametric.py @@ -5,6 +5,8 @@ Values that are swept, optimized, or fixed in the "outer" workflow, so end up being constants in the "inner" workflow. """ +import numpy as np + from .ValuedParam import ValuedParam, InputData, InputTypes # class for custom dynamically-evaluated quantities @@ -70,9 +72,7 @@ def set_signs(self, var_map: dict, to_set: list) -> None: sgn = 1 elif res in var_map['negative']: sgn = -1 - # this works for lists of values; override for floats - for v, value in enumerate(self._parametric): - self._parametric[v] = sgn * value + self._parametric = sgn * np.abs(self._parametric) def get_value(self, debug=False): """ @@ -124,22 +124,6 @@ def get_input_specs(cls): and act as a constant in the inner workflow.""" return spec - def set_signs(self, var_map: dict, to_set: list) -> None: - """ - Sets the signs of stored values for this element according to the map given - @ In, var_map, dict, mapping of negative and positive vars - @ In, to_set, list, variable names to set - @ Out, None - """ - if len(to_set) > 0: - raise RuntimeError(f'Received multipe "to_set" values ({to_set}) in setting signs for {self.__class__.__name__}!') - res = to_set[0] - if res in var_map['positive']: - sgn = 1 - elif res in var_map['negative']: - sgn = -1 - self._parametric = sgn * self._parametric - class OptBounds(Parametric): """ For inputs that are optimized between lower, upper bounds. """ @classmethod diff --git a/src/main.py b/src/main.py index c1983ced..f3030407 100755 --- a/src/main.py +++ b/src/main.py @@ -5,7 +5,6 @@ Runs HERON. 
""" import os -from queue import Empty import sys import argparse From ec3c45edc88212b1505f862cbf923ccb107dc030 Mon Sep 17 00:00:00 2001 From: talbpw Date: Wed, 14 Feb 2024 13:05:03 -0700 Subject: [PATCH 18/24] stash --- src/ValuedParams/Factory.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/ValuedParams/Factory.py b/src/ValuedParams/Factory.py index 6083bbd0..b098a40f 100644 --- a/src/ValuedParams/Factory.py +++ b/src/ValuedParams/Factory.py @@ -79,6 +79,7 @@ def make_input_specs(self, name, descr=None, allowed=None, kind='singular'): factory.registerType('uncertainty', RandomVariable) # ratios, transfers factory.registerType('linear', Linear) +factory.registerType('ratio', Linear) factory.registerType('poly', Polynomial) # TODO add: ROM From a5d17504e6fbce43f5cce7f257901c5feaacb8a3 Mon Sep 17 00:00:00 2001 From: talbpw Date: Thu, 15 Feb 2024 09:21:22 -0700 Subject: [PATCH 19/24] tests passing, need to finish poly test --- src/Components.py | 35 ++++-- src/TransferFuncs/Factory.py | 42 ++++++++ .../Polynomial.py | 38 ++----- src/TransferFuncs/Ratio.py | 76 +++++++++++++ src/TransferFuncs/TransferFunc.py | 81 ++++++++++++++ src/TransferFuncs/__init__.py | 15 +++ src/ValuedParams/Factory.py | 13 +-- src/ValuedParams/Linear.py | 100 ------------------ src/dispatch/PyomoModelHandler.py | 38 ++++++- src/dispatch/PyomoRuleLibrary.py | 16 ++- 10 files changed, 299 insertions(+), 155 deletions(-) create mode 100644 src/TransferFuncs/Factory.py rename src/{ValuedParams => TransferFuncs}/Polynomial.py (62%) create mode 100644 src/TransferFuncs/Ratio.py create mode 100644 src/TransferFuncs/TransferFunc.py create mode 100644 src/TransferFuncs/__init__.py delete mode 100644 src/ValuedParams/Linear.py diff --git a/src/Components.py b/src/Components.py index f83a5ec3..7bf51667 100644 --- a/src/Components.py +++ b/src/Components.py @@ -12,6 +12,7 @@ import xml.etree.ElementTree as ET from HERON.src.Economics import CashFlowUser from HERON.src.ValuedParams import factory as vp_factory +from HERON.src.TransferFuncs import factory as tf_factory from HERON.src.ValuedParamHandler import ValuedParamHandler from HERON.src import _utils as hutils @@ -453,10 +454,10 @@ def read_input(self, specs, mode, comp_name): def _set_valued_param(self, name, comp, spec, mode): """ - Sets up use of a ValuedParam for this interaction for the "name" attribute of this class. + sets up use of a valuedparam for this interaction for the "name" attribute of this class. @ In, name, str, name of member of this class @ In, comp, str, name of associated component - @ In, spec, InputParam, input specifications + @ In, spec, inputparam, input specifications @ In, mode, string, case mode to operate in (e.g. 
'sweep' or 'opt') @ Out, None """ @@ -717,11 +718,10 @@ def get_input_specs(cls): ) ) specs.addSub( - vp_factory.make_input_specs( + tf_factory.make_input_specs( 'transfer', - kind='transfer', - descr=r"""describes how input resources yield output resources for this - component's transfer function.""", + descr=r"""describes the balance between consumed and produced resources for this + component.""", ) ) specs.addSub( @@ -775,7 +775,7 @@ def read_input(self, specs, mode, comp_name): if item.getName() == 'consumes': self._consumes = item.value elif item.getName() == 'transfer': - self._set_valued_param('_transfer', comp_name, item, mode) + self._set_transfer_func('_transfer', comp_name, item) elif item.getName() == 'ramp_limit': self.ramp_limit = item.value elif item.getName() == 'ramp_freq': @@ -786,10 +786,31 @@ def read_input(self, specs, mode, comp_name): if self._transfer is None: if self._consumes: self.raiseAnError(IOError, 'Any component that consumes a resource must have a transfer function describing the production process!') + ## transfer elements are all in IO list + if self._transfer is not None: + self._transfer.check_io(self.get_inputs(), self.get_outputs()) ## ramp limit is (0, 1] if self.ramp_limit is not None and not 0 < self.ramp_limit <= 1: self.raiseAnError(IOError, f'Ramp limit must be (0, 1] but got "{self.ramp_limit}"') + def _set_transfer_func(self, name, comp, spec): + """ + Sets up a Transfer Function + @ In, name, str, name of member of this class + @ In, comp, str, name of associated component + @ In, spec, inputparam, input specifications + @ Out, None + """ + known = tf_factory.knownTypes() + found = False + for sub in spec.subparts: + if sub.getName() in known: + if found: + self.raiseAnError(IOError, f'Received multiple Transfer Functions for component "{name}"!') + self._transfer = tf_factory.returnInstance(sub.getName()) + self._transfer.read(comp, spec) + found = True + def map_vp_signs(self): """ Collect the signs of VPs based on the input/output of this component diff --git a/src/TransferFuncs/Factory.py b/src/TransferFuncs/Factory.py new file mode 100644 index 00000000..75114cf8 --- /dev/null +++ b/src/TransferFuncs/Factory.py @@ -0,0 +1,42 @@ + +# Copyright 2020, Battelle Energy Alliance, LLC +# ALL RIGHTS RESERVED + +from ravenframework.utils import InputData, InputTypes +from ravenframework.EntityFactoryBase import EntityFactory + +from .Ratio import Ratio +from .Polynomial import Polynomial + +class TransferFuncFactory(EntityFactory): + """ + Factory for Transfer Functions + """ + def make_input_specs(self, name, descr=None): + """ + Fill input specs for the provided name and description. + @ In, name, str, name of new spec + @ In, descr, str, optional, description of spec + @ In, allowed, list, optional, string list of allowable types of ValuedParam. Overrides "kind". 
+      @ In, kind, str, optional, kind of ValuedParam grouping (default)
+      @ Out, spec, InputData, specification
+    """
+    add_descr = ''
+    if descr is None:
+      description = add_descr
+    else:
+      description = descr + r"""\\ \\""" + add_descr
+
+    spec = InputData.parameterInputFactory(name, descr=description)
+    for name, klass in self._registeredTypes.items():
+      sub_spec = klass.get_input_specs()
+      sub_spec.name = name
+      spec.addSub(sub_spec)
+    return spec
+
+factory = TransferFuncFactory('TransferFunc')
+
+# fixed in inner
+factory.registerType('ratio', Ratio)
+factory.registerType('linear', Ratio)
+factory.registerType('poly', Polynomial)
diff --git a/src/ValuedParams/Polynomial.py b/src/TransferFuncs/Polynomial.py
similarity index 62%
rename from src/ValuedParams/Polynomial.py
rename to src/TransferFuncs/Polynomial.py
index ac6aa805..d7b0417e 100644
--- a/src/ValuedParams/Polynomial.py
+++ b/src/TransferFuncs/Polynomial.py
@@ -2,15 +2,14 @@
 # Copyright 2020, Battelle Energy Alliance, LLC
 # ALL RIGHTS RESERVED
 """
-  Values that are expressed as a polynomial relationship.
+  Transfer functions that are expressed as a polynomial relationship.
   For example, ax^2 + bxy + cy^2 + dx + ey = fm^2 + gmn + hn^2 + im + jn + k
-  Primarily intended for transfer functions.
 """
 from collections import defaultdict
 
-from .ValuedParam import ValuedParam, InputData, InputTypes
+from .TransferFunc import TransferFunc, InputData, InputTypes
 
-class Polynomial(ValuedParam):
+class Polynomial(TransferFunc):
   """
-    Represents a ValuedParam that is a polynomial relationship.
+    Represents a TransferFunc that is a polynomial relationship.
   """
@@ -51,22 +50,18 @@ def __init__(self):
     #   (water): {(1): 1.61} }
     self._coefficients = defaultdict(dict)
 
-  def read(self, comp_name, spec, mode, alias_dict=None):
+  def read(self, comp_name, spec):
     """
-      Used to read valued param from XML input
-      @ In, comp_name, str, name of component that this valued param will be attached to; only used for print messages
+      Used to read transfer func from XML input
+      @ In, comp_name, str, name of component that this transfer function will be attached to; only used for print messages
       @ In, spec, InputData params, input specifications
-      @ In, mode, type of simulation calculation
-      @ In, alias_dict, dict, optional, aliases to use for variable naming
-      @ Out, needs, list, signals needed to evaluate this ValuedParam at runtime
+      @ Out, None
     """
-    super().read(comp_name, spec, mode, alias_dict=None)
+    super().read(comp_name, spec)
     for coeff_node in spec.findAll('coeff'):
       resource = coeff_node.parameterValues['resource']
       order = coeff_node.parameterValues['order']
-      # CHECKME does this preserve order correctly?
self._coefficients[tuple(resource)][tuple(order)] = coeff_node.value - return [] def get_coefficients(self): """ @@ -75,24 +70,3 @@ def get_coefficients(self): @ Out, coeffs, dict, coefficient mapping """ return self._coefficients - - def evaluate(self, inputs, target_var=None, aliases=None): - """ - Evaluate this ValuedParam, wherever it gets its data from - @ In, inputs, dict, stuff from RAVEN, particularly including the keys 'meta' and 'raven_vars' - @ In, target_var, str, optional, requested outgoing variable name if not None - @ In, aliases, dict, optional, alternate variable names for searching in variables - @ Out, balance, dict, dictionary of resulting evaluation as {vars: vals} - @ Out, inputs, dict, dictionary of meta (possibly changed during evaluation) - """ - if target_var not in self._coefficients: - self.raiseAnError(RuntimeError, f'"rate" for target variable "{target_var}" not found for ' + - f'ValuedParam {self.name}!') - req_res, req_amt = next(iter(inputs['request'].items())) - req_rate = self._coefficients[req_res] - balance = {req_res: req_amt} - for res, rate in self._coefficients.items(): - if res == req_res: - continue - balance[res] = rate / req_rate * req_amt - return balance, inputs diff --git a/src/TransferFuncs/Ratio.py b/src/TransferFuncs/Ratio.py new file mode 100644 index 00000000..5c10befe --- /dev/null +++ b/src/TransferFuncs/Ratio.py @@ -0,0 +1,76 @@ + +# Copyright 2020, Battelle Energy Alliance, LLC +# ALL RIGHTS RESERVED +""" + Values that are expressed as linear ratios of one another. + Primarily intended for transfer functions. +""" +from .TransferFunc import TransferFunc, InputData, InputTypes + +# class for custom dynamically-evaluated quantities +class Ratio(TransferFunc): + """ + Represents a TransferFunc that uses a linear balance of resources, such as 3a + 7b -> 2c. + This means the ratios of the resources must be maintained, NOT 3a + 7b = 2c! + """ + + @classmethod + def get_input_specs(cls): + """ + Input specification for this class. + @ In, None + @ Out, spec, InputData, value-based spec + """ + spec = InputData.parameterInputFactory('ratio', contentType=InputTypes.StringType, + descr=r"""indicates this transfer function is a constant linear combination of resources. For example, + (3a, 7b, -2c) means that the ratio of (3, 7, -2) must be maintained between (a, b, c) for all + production levels. For an equation-based transfer function instead of balance ratio, see Polynomial.""") + rate = InputData.parameterInputFactory('rate', contentType=InputTypes.FloatType, + descr=r"""linear coefficient for the indicated \xmlAttr{resource}.""") + rate.addParam('resource', param_type=InputTypes.StringType, + descr=r"""indicates the resource for which the linear transfer ratio is being provided in this node.""") + spec.addSub(rate) + return spec + + def __init__(self): + """ + Constructor. 
+      @ In, None
+      @ Out, None
+    """
+    super().__init__()
+    self._coefficients = None # ratios, stored in a dict as key: value
+
+  def read(self, comp_name, spec):
+    """
+      Used to read transfer func from XML input
+      @ In, comp_name, str, name of component that this transfer function will be attached to; only used for print messages
+      @ In, spec, InputData params, input specifications
+      @ Out, None
+    """
+    super().read(comp_name, spec)
+    self._coefficients = {}
+    node = spec.findFirst('ratio')
+    # ALIAS SUPPORT
+    if node is None:
+      self.raiseAWarning('"linear" has been deprecated and will be removed in the future; see "ratio" transfer function!')
+      node = spec.findFirst('linear')
+    for rate_node in node.findAll('rate'):
+      resource = rate_node.parameterValues['resource']
+      self._coefficients[resource] = rate_node.value
+
+  def get_resources(self):
+    """
+      Provides the resources used in this transfer function.
+      @ In, None
+      @ Out, resources, set, set of resources used
+    """
+    return set(self._coefficients.keys())
+
+  def get_coefficients(self):
+    """
+      Returns linear coefficients.
+      @ In, None
+      @ Out, coeffs, dict, coefficient mapping
+    """
+    return self._coefficients
diff --git a/src/TransferFuncs/TransferFunc.py b/src/TransferFuncs/TransferFunc.py
new file mode 100644
index 00000000..ac5c2195
--- /dev/null
+++ b/src/TransferFuncs/TransferFunc.py
@@ -0,0 +1,81 @@
+
+# Copyright 2020, Battelle Energy Alliance, LLC
+# ALL RIGHTS RESERVED
+"""
+  Defines the TransferFunc entity.
+  These define the transfer functions for generating Components.
+"""
+import sys
+from HERON.src import _utils as hutils
+try:
+  import ravenframework
+except ModuleNotFoundError:
+  framework_path = hutils.get_raven_loc()
+  sys.path.append(framework_path)
+from ravenframework.utils import InputData, InputTypes
+from ravenframework.BaseClasses import MessageUser
+
+# base class for transfer functions
+class TransferFunc(MessageUser):
+  """
+    These define the transfer functions for generating Components.
+  """
+
+  @classmethod
+  def get_input_specs(cls, name):
+    """
+      Define inputs for this transfer function.
+      @ In, name, string, name for spec (tag)
+      @ Out, spec, InputData, value-based spec
+    """
+    spec = InputData.parameterInputFactory(name)
+    return spec
+
+  def __init__(self):
+    """
+      Constructor.
+      @ In, name, str, name of this valued param
+      @ Out, None
+    """
+    super().__init__()
+    self.type = self.__class__.__name__ # class type, for easy checking
+    self._comp = None # component who uses this transfer function
+
+  def __repr__(self) -> str:
+    """
+      Return Object Representation String
+      @ In, None
+      @ Out, repr, str, string representation
+    """
+    return "<HERON TransferFunc>"
+
+  def read(self, comp_name, spec):
+    """
+      Used to read transfer function from XML input
+      @ In, comp_name, str, name of component that this transfer function is describing
+      @ In, spec, InputData params, input specifications
+      @ Out, None
+    """
+
+  def check_io(self, inputs, outputs):
+    """
+      Checks that the transfer function uses all and only the resources used by the component.
+      @ In, inputs, list, list of input resources to check against.
+      @ In, outputs, list, list of output resources to check against.
+ @ Out, None + """ + used = self.get_resources() + inps = set(inputs) + outs = set(outputs) + excess_inputs = inps - used + excess_outputs = outs - used + unrecog = used - inps.union(outs) + if excess_inputs or excess_outputs or unrecog: + msg = f'Transfer function for Component "{self._comp}" has a mismatch with consumed and produced!' + if excess_inputs: + msg += f'\n... Consumed but not used in transfer: {excess_inputs}' + if excess_outputs: + msg += f'\n... Produced but not used in transfer: {excess_outputs}' + if unrecog: + msg += f'\n... In transfer but not consumed or produced: {unrecog}' + self.raiseAnError(IOError, msg) diff --git a/src/TransferFuncs/__init__.py b/src/TransferFuncs/__init__.py new file mode 100644 index 00000000..4254935c --- /dev/null +++ b/src/TransferFuncs/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2020, Battelle Energy Alliance, LLC +# ALL RIGHTS RESERVED +""" + ValuedParams are the flexible input sources used in HERON. In some way + they represent placeholder values to be evaluated at run time from a variety of sources, + ranging from constants to synthetic histories to AI and others. +""" +# only type references here, as needed +from .TransferFunc import TransferFunc +from .Ratio import Ratio +from .Polynomial import Polynomial + +# provide easy name access to module +from .Factory import factory + diff --git a/src/ValuedParams/Factory.py b/src/ValuedParams/Factory.py index b098a40f..d758d51c 100644 --- a/src/ValuedParams/Factory.py +++ b/src/ValuedParams/Factory.py @@ -9,9 +9,7 @@ from .StaticHistory import StaticHistory from .ROM import ROM from .Function import Function -from .Parametric import Parametric, FixedValue, OptBounds, SweepValues -from .Linear import Linear -from .Polynomial import Polynomial +from .Parametric import FixedValue, OptBounds, SweepValues from .Variable import Variable from .Activity import Activity from .RandomVariable import RandomVariable @@ -77,18 +75,9 @@ def make_input_specs(self, name, descr=None, allowed=None, kind='singular'): factory.registerType('Function', Function) factory.registerType('activity', Activity) factory.registerType('uncertainty', RandomVariable) -# ratios, transfers -factory.registerType('linear', Linear) -factory.registerType('ratio', Linear) -factory.registerType('poly', Polynomial) -# TODO add: ROM - -# TODO are transfer functions and valued evaluations really the same creature? # map of "kinds" of ValuedParams to the default acceptable ValuedParam types allowable = {} -# transfer functions, such as producing components' transfer functions -allowable['transfer'] = ['linear', 'poly', 'Function'] # single evaluations, like cashflow prices and component capacities allowable['singular'] = [ 'fixed_value', diff --git a/src/ValuedParams/Linear.py b/src/ValuedParams/Linear.py deleted file mode 100644 index 5a6d1277..00000000 --- a/src/ValuedParams/Linear.py +++ /dev/null @@ -1,100 +0,0 @@ - -# Copyright 2020, Battelle Energy Alliance, LLC -# ALL RIGHTS RESERVED -""" - Values that are expressed as linear ratios of one another. - Primarily intended for transfer functions. -""" -import numpy as np - -from .ValuedParam import ValuedParam, InputData, InputTypes -from .Polynomial import Polynomial - -ONE_TUP = tuple([1]) - -class Linear(Polynomial): - """ - Represents a ValuedParam that is a linearized transfer function. - """ - - @classmethod - def get_input_specs(cls): - """ - Define parameters for a linear transfer function. 
- @ In, None - @ Out, spec, InputData, value-based spec - """ - spec = InputData.parameterInputFactory('linear', contentType=InputTypes.StringType, - descr=r"""indicates this value should be interpreted as a ratio based on an input value.""") - rate = InputData.parameterInputFactory('rate', contentType=InputTypes.FloatType, - descr=r"""linear coefficient for the indicated \xmlAttr{resource}.""") - rate.addParam('resource', param_type=InputTypes.StringType, - descr=r"""indicates the resource for which the linear transfer rate is being provided in this node.""") - spec.addSub(rate) - return spec - - def __init__(self): - """ - Constructor. - @ In, None - @ Out, None - """ - super().__init__() - - def read(self, comp_name, spec, mode, alias_dict=None): - """ - Used to read valued param from XML input - @ In, comp_name, str, name of component that this valued param will be attached to; only used for print messages - @ In, spec, InputData params, input specifications - @ In, mode, type of simulation calculation - @ In, alias_dict, dict, optional, aliases to use for variable naming - @ Out, needs, list, signals needed to evaluate this ValuedParam at runtime - """ - super().read(comp_name, spec, mode, alias_dict=None) - for rate_node in spec.findAll('rate'): - resource = rate_node.parameterValues['resource'] - # linear coefficients are all order 1, no cross-variable multipyling - # coefficient signs come from activity, not coeffs, so fix that up too - self._coefficients[tuple([resource])][ONE_TUP] = abs(rate_node.value) - return [] - - def set_signs(self, var_map: dict, to_set: list) -> None: - """ - Sets the signs of stored values for this element according to the map given - @ In, var_map, dict, mapping of negative and positive vars - @ In, to_set, list, variable names to set - @ Out, None - """ - aaaaa - for resources, orders_dict in self._coefficients.items(): - res = resources[0] - val = orders_dict[ONE_TUP] - print(f'DEBUGG res: "{res}", val {val}') - if res in var_map['positive']: - sgn = 1 - elif res in var_map['negative']: - sgn = -1 - print(f'DEBUGG res: "{res}", val {val}, sgn: {sgn}') - orders_dict[ONE_TUP] = sgn * np.abs(val) - - def evaluate(self, inputs, target_var=None, aliases=None): - """ - Evaluate this ValuedParam, wherever it gets its data from - @ In, inputs, dict, stuff from RAVEN, particularly including the keys 'meta' and 'raven_vars' - @ In, target_var, str, optional, requested outgoing variable name if not None - @ In, aliases, dict, optional, alternate variable names for searching in variables - @ Out, balance, dict, dictionary of resulting evaluation as {vars: vals} - @ Out, inputs, dict, dictionary of meta (possibly changed during evaluation) - """ - # TODO is this an unnecessarily slow check? 
- if target_var not in [x[0] for x in self._coefficients]: - self.raiseAnError(RuntimeError, f'"rate" for target variable "{target_var}" not found for ' + - f'ValuedParam {self.name}!') - req_res, req_amt = next(iter(inputs['request'].items())) - req_rate = self._coefficients[(req_res)][(1)] - balance = {req_res: req_amt} - for res, rate in self._coefficients.items(): - if res == req_res: - continue - balance[res] = rate / req_rate * req_amt - return balance, inputs diff --git a/src/dispatch/PyomoModelHandler.py b/src/dispatch/PyomoModelHandler.py index 0ec3820a..49ffa3bf 100644 --- a/src/dispatch/PyomoModelHandler.py +++ b/src/dispatch/PyomoModelHandler.py @@ -363,21 +363,53 @@ def _create_transfer(self, comp, prod_name): @ In, prod_name, str, name of production variable @ Out, None """ - rule_name = f'{comp.name}_transfer_func' transfer = comp.get_interaction().get_transfer() if transfer is None: return + if transfer.type == 'Ratio': + self._create_transfer_ratio(transfer, comp, prod_name) + elif transfer.type == 'Polynomial': + self._create_transfer_poly(transfer, comp, prod_name) + else: + raise NotImplementedError(f'Transfer function type "{transfer.type}" not implemented for PyomoModelHandler!') + + def _create_transfer_ratio(self, transfer, comp, prod_name): + """ + Create a balance ratio-based transfer function. + This comes in the form of a balance expression (not an equality). + @ In, transfer, TransferFunc, Ratio transfer function + @ In, comp, Component, component object for this transfer + @ In, prod_name, str, name of production element + @ Out, None + """ + name = comp.name + coeffs = transfer.get_coefficients() + coeffs_iter = iter(coeffs.items()) + first_name, first_coef = next(coeffs_iter) + first_r = self.model.resource_index_map[comp][first_name] + for resource, coef in coeffs_iter: + ratio = coef / first_coef + r = self.model.resource_index_map[comp][resource] + rule_name = f'{name}_{resource}_{first_name}_transfer' + rule = lambda mod, t: prl.ratio_transfer_rule(ratio, r, first_r, prod_name, mod, t) + constr = pyo.Constraint(self.model.T, rule=rule) + setattr(self.model, rule_name, constr) + + def _create_transfer_poly(self, transfer, comp, prod_name): + """ + Create a polynomial transfer function. This comes in the form of an equality expression. + """ + rule_name = f'{comp.name}_transfer_func' coeffs = transfer.get_coefficients() # dict of form {(r1, r2): {(o1, o2): n}} # where: # r1, r2 are resource names # o1, o2 are polynomial orders (may not be integers?) 
# n is the float polynomial coefficient for the term - rule = lambda mod, t: prl.transfer_rule(coeffs, self.model.resource_index_map[comp], prod_name, mod, t) + rule = lambda mod, t: prl.poly_transfer_rule(coeffs, self.model.resource_index_map[comp], prod_name, mod, t) constr = pyo.Constraint(self.model.T, rule=rule) setattr(self.model, rule_name, constr) - def _create_storage(self, comp): """ Creates storage pyomo variable objects for a storage component diff --git a/src/dispatch/PyomoRuleLibrary.py b/src/dispatch/PyomoRuleLibrary.py index 38a0f317..75304641 100644 --- a/src/dispatch/PyomoRuleLibrary.py +++ b/src/dispatch/PyomoRuleLibrary.py @@ -287,7 +287,21 @@ def min_prod_rule(prod_name, r, caps, minimums, m, t) -> bool: else: return prod[r, t] <= minimums[t] -def transfer_rule(coeffs, r_map, prod_name, m, t) -> bool: +def ratio_transfer_rule(ratio: float, r: int, ref_r: int,prod_name: str, m, t) -> bool: + """ + Constructs transfer function constraints + @ In, ratio, float, balanced ratio of this resource to the reference resource + @ In, r, int, index for this resource in the activity map + @ In, ref_r, int, index of the reference resource in the activity map + @ In, prod_name, str, name of production variable + @ In, m, pyo.ConcreteModel, associated model + @ In, t, int, index of time variable + @ Out, transfer, bool, transfer ratio check + """ + activity = getattr(m, prod_name) + return activity[r, t] == activity[ref_r, t] * ratio + +def poly_transfer_rule(coeffs, r_map, prod_name, m, t) -> bool: """ Constructs transfer function constraints @ In, coeffs, dict, nested mapping of resources and polynomial orders to coefficients From 95dfa6e0219f99291ef9b603dcbacee4025b1e68 Mon Sep 17 00:00:00 2001 From: talbpw Date: Thu, 15 Feb 2024 12:13:36 -0700 Subject: [PATCH 20/24] poly tx working, analytic test passed --- src/Components.py | 89 ++----------------- src/TransferFuncs/Polynomial.py | 18 +++- src/TransferFuncs/Ratio.py | 18 ++++ src/TransferFuncs/TransferFunc.py | 18 +++- src/ValuedParamHandler.py | 8 -- src/ValuedParams/Parametric.py | 16 ---- src/ValuedParams/ValuedParam.py | 9 -- .../transfer_funcs/gold/dispatch_print.csv | 22 +++++ .../mechanics/transfer_funcs/heron_input.xml | 37 ++++++-- .../mechanics/transfer_funcs/tests | 13 --- 10 files changed, 104 insertions(+), 144 deletions(-) create mode 100644 tests/integration_tests/mechanics/transfer_funcs/gold/dispatch_print.csv diff --git a/src/Components.py b/src/Components.py index 7bf51667..d35873e3 100644 --- a/src/Components.py +++ b/src/Components.py @@ -444,13 +444,13 @@ def read_input(self, specs, mode, comp_name): if len(resources) == 1: self._capacity_var = list(resources)[0] else: - self.raiseAnError(IOError, 'If multiple resources are active, "capacity" requires a "resource" specified!') + self.raiseAnError(IOError, f'Component "{comp_name}": If multiple resources are active, "capacity" requires a "resource" specified!') ## minimum: basically the same as capacity, functionally if self._minimum and self._minimum_var is None: if len(resources) == 1: self._minimum_var = list(resources)[0] else: - self.raiseAnError(IOError, 'If multiple resources are active, "minimum" requires a "resource" specified!') + self.raiseAnError(IOError, f'Component "{comp_name}": If multiple resources are active, "minimum" requires a "resource" specified!') def _set_valued_param(self, name, comp, spec, mode): """ @@ -473,30 +473,7 @@ def finalize_init(self): @ In, None @ Out, None """ - # fix up signs: send sign map to valued params - var_map = 
self.map_vp_signs() - self.set_vp_signs(var_map) - - def map_vp_signs(self): - """ - Collect the signs of VPs based on the input/output of this component - @ In, None - @ Out, var_map, dict, map of variables to their correct sign - """ - var_map = {'negative': set(), 'positive': set()} - return var_map - - def set_vp_signs(self, var_map: dict): - """ - Set the signs of VPs based on the input/output of this component - @ In, var_map, dict, map of variables to their correct sign - @ Out, None - """ - # this list is bespoke - # TODO is there an automated way to come up with valued params that need editing? - self._capacity.set_signs(var_map, to_set=[self.get_capacity_var]) - if self._minimum is not None: - self._minimum.set_signs(var_map) + # nothing to do in general def get_capacity(self, meta, raw=False): """ @@ -788,7 +765,8 @@ def read_input(self, specs, mode, comp_name): self.raiseAnError(IOError, 'Any component that consumes a resource must have a transfer function describing the production process!') ## transfer elements are all in IO list if self._transfer is not None: - self._transfer.check_io(self.get_inputs(), self.get_outputs()) + self._transfer.check_io(self.get_inputs(), self.get_outputs(), comp_name) + self._transfer.set_io_signs(self.get_inputs(), self.get_outputs()) ## ramp limit is (0, 1] if self.ramp_limit is not None and not 0 < self.ramp_limit <= 1: self.raiseAnError(IOError, f'Ramp limit must be (0, 1] but got "{self.ramp_limit}"') @@ -811,30 +789,6 @@ def _set_transfer_func(self, name, comp, spec): self._transfer.read(comp, spec) found = True - def map_vp_signs(self): - """ - Collect the signs of VPs based on the input/output of this component - @ In, None - @ Out, var_map, dict, map of variables to their correct sign - """ - var_map = super().map_vp_signs() - # FIXME speed this up when we show it works - for consumed in self.get_inputs(): - var_map['negative'].add(consumed) - for produced in self.get_outputs(): - var_map['produced'].add(produced) - return var_map - - def set_vp_signs(self, var_map: dict): - """ - Set the signs of VPs based on the input/output of this component - @ In, var_map, dict, map of variables to their correct sign - @ Out, None - """ - super().set_vp_signs(var_map) - self._transfer.set_signs(var_map) - self.ramp_limit.set_signs(var_map) - def get_inputs(self): """ Returns the set of resources that are inputs to this interaction. @@ -954,28 +908,6 @@ def read_input(self, specs, mode, comp_name): # the capacity is limited by the stored resource. self._capacity_var = self._stores - def map_vp_signs(self): - """ - Collect the signs of VPs based on the input/output of this component - @ In, None - @ Out, var_map, dict, map of variables to their correct sign - """ - var_map = super().map_vp_signs() - for stores in self.get_ouputs(): - var_map['positive'].add(stores) - return var_map - - def set_vp_signs(self, var_map: dict): - """ - Set the signs of VPs based on the input/output of this component - @ In, var_map, dict, map of variables to their correct sign - @ Out, None - """ - super().set_vp_signs(var_map) - self._rate.set_signs(var_map) - self._initial_stored.set_signs(var_map) - # self._strategy? - def get_inputs(self): """ Returns the set of resources that are inputs to this interaction. 
@@ -1150,17 +1082,6 @@ def read_input(self, specs, mode, comp_name): self._demands = specs.parameterValues['resource'] Interaction.read_input(self, specs, mode, comp_name) - def map_vp_signs(self): - """ - Collect the signs of VPs based on the input/output of this component - @ In, None - @ Out, var_map, dict, map of variables to their correct sign - """ - var_map = super().map_vp_signs() - for demand in self.get_inputs(): - var_map['negative'].add(demand) - return var_map - def get_inputs(self): """ Returns the set of resources that are inputs to this interaction. diff --git a/src/TransferFuncs/Polynomial.py b/src/TransferFuncs/Polynomial.py index d7b0417e..2d0dd86f 100644 --- a/src/TransferFuncs/Polynomial.py +++ b/src/TransferFuncs/Polynomial.py @@ -27,7 +27,10 @@ def get_input_specs(cls): set equal to zero. For instance, the equation $ax^2 + bx + c = dy^2 + fy + g$ should be reformulated as $ax^2 + bx + (c-g) - dy^2 - fy = 0$.""") coeff = InputData.parameterInputFactory('coeff', contentType=InputTypes.FloatType, - descr=r"""one coefficient for one poloynomial term of the specified \xmlAttr{resources}.""") + descr=r"""one coefficient for one poloynomial term of the specified \xmlAttr{resources}. + Care should be taken to assure the sign of the coefficient is working as expected, + as consumed resources have a negative sign while produced resources have a positive + sign, and the full equation should have the form 0 = ... .""") coeff.addParam('resource', param_type=InputTypes.StringListType, descr=r"""indicates the resource(s) for which the polynomial coefficient is being provided in this node. Note that the order of the resources matters for specifying the polynomial \xmlAttr{order}.""") @@ -58,11 +61,22 @@ def read(self, comp_name, spec): @ Out, None """ super().read(comp_name, spec) - for coeff_node in spec.findAll('coeff'): + for coeff_node in spec.findFirst('poly').findAll('coeff'): resource = coeff_node.parameterValues['resource'] order = coeff_node.parameterValues['order'] self._coefficients[tuple(resource)][tuple(order)] = coeff_node.value + def get_resources(self): + """ + Provides the resources used in this transfer function. + @ In, None + @ Out, resources, set, set of resources used + """ + res_set = set() + for res_tup, ord_dict in self._coefficients.items(): + res_set = res_set.union(set(res_tup)) + return res_set + def get_coefficients(self): """ Returns linear coefficients. diff --git a/src/TransferFuncs/Ratio.py b/src/TransferFuncs/Ratio.py index 5c10befe..62099fae 100644 --- a/src/TransferFuncs/Ratio.py +++ b/src/TransferFuncs/Ratio.py @@ -5,6 +5,8 @@ Values that are expressed as linear ratios of one another. Primarily intended for transfer functions. """ +import numpy as np + from .TransferFunc import TransferFunc, InputData, InputTypes # class for custom dynamically-evaluated quantities @@ -74,3 +76,19 @@ def get_coefficients(self): @ Out, coeffs, dict, coefficient mapping """ return self._coefficients + + def set_io_signs(self, consumed, produced): + """ + Fix up input/output signs, if interpretable + @ In, consumed, list, list of resources consumed in the transfer + @ In, produced, list, list of resources produced in the transfer + @ Out, None + """ + for res, coef in self.get_coefficients().items(): + if res in consumed: + self._coefficients[res] = - np.abs(coef) + elif res in produced: + self._coefficients[res] = np.abs(coef) + else: + # should not be able to get here after IO gets checked! 
+        raise RuntimeError(f'While checking transfer coefficient, resource "{res}" was neither consumed nor produced!')
diff --git a/src/TransferFuncs/TransferFunc.py b/src/TransferFuncs/TransferFunc.py
index ac5c2195..17aa47bf 100644
--- a/src/TransferFuncs/TransferFunc.py
+++ b/src/TransferFuncs/TransferFunc.py
@@ -39,7 +39,6 @@ def __init__(self):
     """
     super().__init__()
     self.type = self.__class__.__name__ # class type, for easy checking
-    self._comp = None # component who uses this transfer function
 
   def __repr__(self) -> str:
     """
@@ -57,11 +56,12 @@ def read(self, comp_name, spec):
       @ Out, None
     """
 
-  def check_io(self, inputs, outputs):
+  def check_io(self, inputs, outputs, comp_name):
     """
       Checks that the transfer function uses all and only the resources used by the component.
       @ In, inputs, list, list of input resources to check against.
       @ In, outputs, list, list of output resources to check against.
+      @ In, comp_name, str, name of component that this transfer function is describing
       @ Out, None
     """
     used = self.get_resources()
@@ -71,7 +71,10 @@ def check_io(self, inputs, outputs):
     excess_outputs = outs - used
     unrecog = used - inps.union(outs)
     if excess_inputs or excess_outputs or unrecog:
-      msg = f'Transfer function for Component "{self._comp}" has a mismatch with consumed and produced!'
+      msg = f'Transfer function for Component "{comp_name}" has a mismatch with consumed and produced!'
+      msg += f'\n... All Consumed: {inps}'
+      msg += f'\n... All Produced: {outs}'
+      msg += f'\n... All in Transfer Function: {used}'
       if excess_inputs:
         msg += f'\n... Consumed but not used in transfer: {excess_inputs}'
       if excess_outputs:
@@ -79,3 +82,12 @@
       msg += f'\n... In transfer but not consumed or produced: {unrecog}'
       self.raiseAnError(IOError, msg)
+
+  def set_io_signs(self, consumed, produced):
+    """
+      Fix up input/output signs, if interpretable
+      @ In, consumed, list, list of resources consumed in the transfer
+      @ In, produced, list, list of resources produced in the transfer
+      @ Out, None
+    """
+    # nothing to do by default
diff --git a/src/ValuedParamHandler.py b/src/ValuedParamHandler.py
index bf4c1978..1e30aa63 100644
--- a/src/ValuedParamHandler.py
+++ b/src/ValuedParamHandler.py
@@ -129,14 +129,6 @@ def crosscheck(self, interaction):
     """
     self._vp.crosscheck(interaction)
 
-  def set_signs(self, var_map: dict, to_set: list=None) -> None:
-    """
-      Set the signs of the parameters for this VP based on provided conventions.
- @ In, var_map, dict, lists of variables under 'positive' or 'negative' keys - @ Out, None - """ - self._vp.set_signs(var_map, to_set=to_set) - def get_source(self): """ Access the source type and name of the VP diff --git a/src/ValuedParams/Parametric.py b/src/ValuedParams/Parametric.py index 61923e23..077fd5f7 100644 --- a/src/ValuedParams/Parametric.py +++ b/src/ValuedParams/Parametric.py @@ -58,22 +58,6 @@ def read(self, comp_name, spec, mode, alias_dict=None): self._debug_value = spec.parameterValues.get('debug_value', None) return [] - def set_signs(self, var_map: dict, to_set: list) -> None: - """ - Sets the signs of stored values for this element according to the map given - @ In, var_map, dict, mapping of negative and positive vars - @ In, to_set, list, variable names to set - @ Out, None - """ - if len(to_set) > 0: - raise RuntimeError(f'Received multipe "to_set" values ({to_set}) in setting signs for {self.__class__.__name__}!') - res = to_set[0] - if res in var_map['positive']: - sgn = 1 - elif res in var_map['negative']: - sgn = -1 - self._parametric = sgn * np.abs(self._parametric) - def get_value(self, debug=False): """ Get the value for this parametric source. diff --git a/src/ValuedParams/ValuedParam.py b/src/ValuedParams/ValuedParam.py index 99a9980c..051f6c93 100644 --- a/src/ValuedParams/ValuedParam.py +++ b/src/ValuedParams/ValuedParam.py @@ -106,15 +106,6 @@ def set_object(self, obj): """ self._target_obj = obj - def set_signs(self, var_map: dict, to_set: list) -> None: - """ - Sets the signs of elements of this ValuedParam based on keywords. - @ In, var_map, dict, maps of positive and negative signed keywords - @ In, to_set, list, list of variables to set signs for - @ Out, None - """ - # nothing to do by default - def evaluate(self, inputs, target_var=None, aliases=None): """ Evaluate this ValuedParam, wherever it gets its data from diff --git a/tests/integration_tests/mechanics/transfer_funcs/gold/dispatch_print.csv b/tests/integration_tests/mechanics/transfer_funcs/gold/dispatch_print.csv new file mode 100644 index 00000000..9d2788d3 --- /dev/null +++ b/tests/integration_tests/mechanics/transfer_funcs/gold/dispatch_print.csv @@ -0,0 +1,22 @@ +RAVEN_sample_ID,Time,_ROM_Cluster,Year,scaling,Dispatch__FundingSource__production__funding,Dispatch__LaborSource__production__labor,Dispatch__BalanceRatio__production__funding,Dispatch__BalanceRatio__production__work,Dispatch__Quadratic__production__funding,Dispatch__Quadratic__production__labor,Dispatch__Quadratic__production__work,Dispatch__Milestones__production__work,Dispatch__Outsource__production__funding,Dispatch__BusyWork__production__labor,Signal,PointProbability,prefix,ProbabilityWeight +0,0.0,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,-2.51444698396e-10,1.0,1,1.0 +0,0.1,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.0627905192528,1.0,1,1.0 +0,0.2,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.125333233263,1.0,1,1.0 +0,0.3,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.187381314259,1.0,1,1.0 
+0,0.4,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.248689886814,1.0,1,1.0 +0,0.5,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.309016994,1.0,1,1.0 +0,0.6,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.368124552286,1.0,1,1.0 +0,0.7,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.425779291144,1.0,1,1.0 +0,0.8,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.481753673658,1.0,1,1.0 +0,0.9,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.535826794514,1.0,1,1.0 +0,1.0,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.587785251806,1.0,1,1.0 +0,1.1,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.637423989243,1.0,1,1.0 +0,1.2,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.684547105404,1.0,1,1.0 +0,1.3,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.728968626879,1.0,1,1.0 +0,1.4,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.770513242217,1.0,1,1.0 +0,1.5,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.809016993801,1.0,1,1.0 +0,1.6,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.844327924914,1.0,1,1.0 +0,1.7,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.876306679443,1.0,1,1.0 +0,1.8,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.904827051854,1.0,1,1.0 +0,1.9,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.929776485266,1.0,1,1.0 +0,2.0,0,10,1.0,200.0,500.000004998,-99.9999990104,24.9999997526,-100.000000998,-500.000005007,615.000006654,-640.000006407,7.99132605438e-09,8.80671259177e-09,0.951056515665,1.0,1,1.0 diff --git a/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml b/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml index 97f1bc72..195b867a 100644 --- a/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml +++ b/tests/integration_tests/mechanics/transfer_funcs/heron_input.xml @@ -48,7 +48,7 @@ - + - + funding -100 - + 4 1 - + @@ -80,9 +80,10 @@ + - - + funding, labor 
@@ -93,14 +94,14 @@ -0.9 -1 -1e-6 - 1 + -1 2 - --> + @@ -140,6 +141,24 @@ + + + + -500 + + + + 2 + + + labor + + + 1 + + + + diff --git a/tests/integration_tests/mechanics/transfer_funcs/tests b/tests/integration_tests/mechanics/transfer_funcs/tests index 21236e68..bab9be3a 100644 --- a/tests/integration_tests/mechanics/transfer_funcs/tests +++ b/tests/integration_tests/mechanics/transfer_funcs/tests @@ -2,25 +2,12 @@ [./DebugMode] type = 'HeronIntegration' input = heron_input.xml - [./dispatch_db] - type = NetCDF - output = 'Debug_Run_o/dispatch.nc' - gold_files = 'dispatch.nc' - [../] [./dispatch_csv] type = UnorderedCSV output = 'Debug_Run_o/dispatch_print.csv' gold_files = 'dispatch_print.csv' rel_err = 1e-8 [../] - [./debug_plot] - type = Exists - output = 'Debug_Run_o/dispatch_id0_y10_c0.png Debug_Run_o/dispatch_id0_y11_c0.png Debug_Run_o/dispatch_id1_y10_c0.png Debug_Run_o/dispatch_id1_y11_c0.png' - [../] - [./debug_plot] - type = Exists - output = 'network.png' - [../] [../] [] From 5bc95b91ba9f201a739b823baba6ac7f52745ca4 Mon Sep 17 00:00:00 2001 From: talbpw Date: Thu, 15 Feb 2024 12:26:52 -0700 Subject: [PATCH 21/24] cleanup --- src/Components.py | 4 ++-- src/dispatch/PyomoModelHandler.py | 4 ++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/Components.py b/src/Components.py index d35873e3..48c13691 100644 --- a/src/Components.py +++ b/src/Components.py @@ -454,10 +454,10 @@ def read_input(self, specs, mode, comp_name): def _set_valued_param(self, name, comp, spec, mode): """ - sets up use of a valuedparam for this interaction for the "name" attribute of this class. + Sets up use of a ValuedParam for this interaction for the "name" attribute of this class. @ In, name, str, name of member of this class @ In, comp, str, name of associated component - @ In, spec, inputparam, input specifications + @ In, spec, InputParam, input specifications @ In, mode, string, case mode to operate in (e.g. 'sweep' or 'opt') @ Out, None """ diff --git a/src/dispatch/PyomoModelHandler.py b/src/dispatch/PyomoModelHandler.py index 49ffa3bf..05878a6c 100644 --- a/src/dispatch/PyomoModelHandler.py +++ b/src/dispatch/PyomoModelHandler.py @@ -398,6 +398,10 @@ def _create_transfer_ratio(self, transfer, comp, prod_name): def _create_transfer_poly(self, transfer, comp, prod_name): """ Create a polynomial transfer function. This comes in the form of an equality expression. 
+      @ In, transfer, TransferFunc, Polynomial transfer function
+      @ In, comp, Component, component object for this transfer
+      @ In, prod_name, str, name of production element
+      @ Out, None
     """
     rule_name = f'{comp.name}_transfer_func'
     coeffs = transfer.get_coefficients()
From 56131ca87bfe531fa8fc0e1571170bc59b90e8d5 Mon Sep 17 00:00:00 2001
From: talbpw
Date: Thu, 15 Feb 2024 13:14:05 -0700
Subject: [PATCH 22/24] requiring solver ipopt for nonlinear solve

---
 tests/integration_tests/mechanics/transfer_funcs/tests | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tests/integration_tests/mechanics/transfer_funcs/tests b/tests/integration_tests/mechanics/transfer_funcs/tests
index bab9be3a..3b4a0229 100644
--- a/tests/integration_tests/mechanics/transfer_funcs/tests
+++ b/tests/integration_tests/mechanics/transfer_funcs/tests
@@ -7,6 +7,7 @@
       output = 'Debug_Run_o/dispatch_print.csv'
       gold_files = 'dispatch_print.csv'
       rel_err = 1e-8
+      needed_executable = 'ipopt'
     [../]
   [../]
 []

From 782e333ef99a5d67fc8bc0df300ab30077342ad1 Mon Sep 17 00:00:00 2001
From: talbpw
Date: Thu, 15 Feb 2024 14:20:53 -0700
Subject: [PATCH 23/24] most of review comments addressed

---
 src/TransferFuncs/Factory.py      |  2 --
 src/TransferFuncs/Ratio.py        | 11 ++++++++---
 src/TransferFuncs/TransferFunc.py |  2 +-
 src/TransferFuncs/__init__.py     |  6 +++---
 src/ValuedParams/Parametric.py    |  2 --
 5 files changed, 12 insertions(+), 11 deletions(-)

diff --git a/src/TransferFuncs/Factory.py b/src/TransferFuncs/Factory.py
index 75114cf8..8479a714 100644
--- a/src/TransferFuncs/Factory.py
+++ b/src/TransferFuncs/Factory.py
@@ -17,8 +17,6 @@ def make_input_specs(self, name, descr=None):
       Fill input specs for the provided name and description.
       @ In, name, str, name of new spec
       @ In, descr, str, optional, description of spec
-      @ In, allowed, list, optional, string list of allowable types of ValuedParam. Overrides "kind".
-      @ In, kind, str, optional, kind of ValuedParam grouping (default)
       @ Out, spec, InputData, specification
     """
     add_descr = ''
diff --git a/src/TransferFuncs/Ratio.py b/src/TransferFuncs/Ratio.py
index 62099fae..741baf51 100644
--- a/src/TransferFuncs/Ratio.py
+++ b/src/TransferFuncs/Ratio.py
@@ -25,8 +25,11 @@ def get_input_specs(cls):
     """
     spec = InputData.parameterInputFactory('ratio', contentType=InputTypes.StringType,
       descr=r"""indicates this transfer function is a constant linear combination of resources. For example,
-        (3a, 7b, -2c) means that the ratio of (3, 7, -2) must be maintained between (a, b, c) for all
-        production levels. For an equation-based transfer function instead of balance ratio, see Polynomial.""")
+        a balance equation might be written as 3a + 7b -> 2c, implying that to make 2c, it always takes
+        3 parts a and 7 parts b, or the balance ratio (3a, 7b, 2c). This means that the ratio of (3, 7, 2) must be
+        maintained between (a, b, c) for all production levels. Note that the coefficient signs are automatically fixed
+        internally to be negative for consumed quantities and positive for produced resources, regardless of signs used
+        by the user.
For an equation-based transfer function instead of balance ratio, see Polynomial.""") rate = InputData.parameterInputFactory('rate', contentType=InputTypes.FloatType, descr=r"""linear coefficient for the indicated \xmlAttr{resource}.""") rate.addParam('resource', param_type=InputTypes.StringType, @@ -55,8 +58,10 @@ def read(self, comp_name, spec): node = spec.findFirst('ratio') # ALIAS SUPPORT if node is None: - self.raiseAWarning('"linear" has been deprecated and will be removed in the future; see "ratio" transfer function!') node = spec.findFirst('linear') + if node is None: + self.raiseAnError(IOError, f'Unrecognized transfer function for component "{comp_name}": "{spec.name}"') + self.raiseAWarning('"linear" has been deprecated and will be removed in the future; see "ratio" transfer function!') for rate_node in node.findAll('rate'): resource = rate_node.parameterValues['resource'] self._coefficients[resource] = rate_node.value diff --git a/src/TransferFuncs/TransferFunc.py b/src/TransferFuncs/TransferFunc.py index 17aa47bf..3cd92309 100644 --- a/src/TransferFuncs/TransferFunc.py +++ b/src/TransferFuncs/TransferFunc.py @@ -34,7 +34,7 @@ def get_input_specs(cls, name): def __init__(self): """ Constructor. - @ In, name, str, name of this valued param + @ In, None @ Out, None """ super().__init__() diff --git a/src/TransferFuncs/__init__.py b/src/TransferFuncs/__init__.py index 4254935c..b8422e5d 100644 --- a/src/TransferFuncs/__init__.py +++ b/src/TransferFuncs/__init__.py @@ -1,9 +1,9 @@ # Copyright 2020, Battelle Energy Alliance, LLC # ALL RIGHTS RESERVED """ - ValuedParams are the flexible input sources used in HERON. In some way - they represent placeholder values to be evaluated at run time from a variety of sources, - ranging from constants to synthetic histories to AI and others. + Transfer functions describe the balance between consumed and produced + resources for generating components. This module defines the templates + that can be used to describe transfer functions. """ # only type references here, as needed from .TransferFunc import TransferFunc diff --git a/src/ValuedParams/Parametric.py b/src/ValuedParams/Parametric.py index 077fd5f7..6b1c6e0a 100644 --- a/src/ValuedParams/Parametric.py +++ b/src/ValuedParams/Parametric.py @@ -5,8 +5,6 @@ Values that are swept, optimized, or fixed in the "outer" workflow, so end up being constants in the "inner" workflow. """ -import numpy as np - from .ValuedParam import ValuedParam, InputData, InputTypes # class for custom dynamically-evaluated quantities From 662e50069db576210c086737198f46d3180bfd81 Mon Sep 17 00:00:00 2001 From: talbpw Date: Thu, 15 Feb 2024 14:35:39 -0700 Subject: [PATCH 24/24] moved ipopt executable check --- tests/integration_tests/mechanics/transfer_funcs/tests | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration_tests/mechanics/transfer_funcs/tests b/tests/integration_tests/mechanics/transfer_funcs/tests index 3b4a0229..18b4c382 100644 --- a/tests/integration_tests/mechanics/transfer_funcs/tests +++ b/tests/integration_tests/mechanics/transfer_funcs/tests @@ -2,12 +2,12 @@ [./DebugMode] type = 'HeronIntegration' input = heron_input.xml + needed_executable = 'ipopt' [./dispatch_csv] type = UnorderedCSV output = 'Debug_Run_o/dispatch_print.csv' gold_files = 'dispatch_print.csv' rel_err = 1e-8 - needed_executable = 'ipopt' [../] [../] []
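
As a usage illustration of the ratio-based transfer introduced above: the sketch below builds a tiny standalone pyomo model and applies the same equality form as ratio_transfer_rule from PyomoRuleLibrary. It assumes only that pyomo is installed; the component name 'gen', the steam/electricity resources, and the index layout are hypothetical stand-ins for what PyomoModelHandler actually constructs, not HERON's real model setup.

import pyomo.environ as pyo

def ratio_transfer_rule(ratio, r, ref_r, prod_name, m, t):
  # same equality form as the rule added to PyomoRuleLibrary: this resource's
  # activity is locked to a fixed multiple of the reference resource's activity
  activity = getattr(m, prod_name)
  return activity[r, t] == activity[ref_r, t] * ratio

m = pyo.ConcreteModel()
m.T = pyo.Set(initialize=[0, 1, 2])  # time indices
m.R = pyo.Set(initialize=[0, 1])     # hypothetical: 0 = steam (consumed), 1 = electricity (produced)
m.gen_production = pyo.Var(m.R, m.T, initialize=0.0)

# after set_io_signs, a (3 steam -> 2 electricity) balance carries coefficients (-3, 2),
# so _create_transfer_ratio would pass ratio = 2 / -3 with steam as the reference resource
ratio = 2.0 / -3.0
m.gen_electricity_steam_transfer = pyo.Constraint(
  m.T, rule=lambda mod, t: ratio_transfer_rule(ratio, 1, 0, 'gen_production', mod, t))
m.gen_electricity_steam_transfer.pprint()  # consuming 3 steam (activity -3) forces producing 2 electricity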
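The nested coefficient map that Polynomial.read builds can be exercised without pyomo at all. The following sketch evaluates the polynomial documented in the removed unit-test fixture (a*f**2 + b*f + c*f**0.2 + k - w = 0 with a=1e-3, b=-0.5, c=20, k=10); the helper poly_residual is illustrative only, but a feasible dispatch is one that drives it to zero, which is what poly_transfer_rule enforces as a pyomo equality.

from collections import defaultdict

# {(resource, ...): {(order, ...): coefficient}}, the structure Polynomial.read builds
coeffs = defaultdict(dict)
coeffs[('funding',)] = {(2,): 1e-3, (1,): -0.5, (0.2,): 20.0, (0,): 10.0}
coeffs[('work',)] = {(1,): -1.0}

def poly_residual(activity, coeffs):
  """ Sum every polynomial term at the given activity levels. """
  total = 0.0
  for resources, orders in coeffs.items():
    for order_tup, coef in orders.items():
      term = coef
      for res, order in zip(resources, order_tup):
        term *= activity[res] ** order
      total += term
  return total

# funding of 200 supports roughly 7.7 units of work for these coefficients
print(poly_residual({'funding': 200.0, 'work': 7.7}, coeffs))  # close to zero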
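Finally, the sign convention imposed by Ratio.set_io_signs can be summarized in isolation: consumed resources get negative coefficients and produced resources positive ones, regardless of the sign the user wrote in the input. This is a standalone paraphrase of the method with hypothetical resource names, not the class itself.

import numpy as np

def set_io_signs(coefficients, consumed, produced):
  # consumed -> negative, produced -> positive, mirroring Ratio.set_io_signs
  for res, coef in coefficients.items():
    if res in consumed:
      coefficients[res] = -np.abs(coef)
    elif res in produced:
      coefficients[res] = np.abs(coef)
    else:
      # check_io should make this unreachable
      raise RuntimeError(f'Resource "{res}" is neither consumed nor produced!')
  return coefficients

print(set_io_signs({'steam': 3.0, 'electricity': -2.0}, {'steam'}, {'electricity'}))
# {'steam': -3.0, 'electricity': 2.0}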