Skip to content

Commit

Permalink
Add checks for inconsistent flow shares
Browse files Browse the repository at this point in the history
  • Loading branch information
henhuy committed Nov 15, 2023
1 parent a15161d commit 5ea6161
Show file tree
Hide file tree
Showing 2 changed files with 176 additions and 46 deletions.
148 changes: 109 additions & 39 deletions src/oemof/solph/components/experimental/_mimo_converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,9 @@
from oemof.solph.flows import Flow


FLOW_SHARE_TYPES = ("min", "max", "fix")


class MultiInputMultiOutputConverter(Node):
"""A linear ConverterBlock object with n inputs and n outputs.
Expand Down Expand Up @@ -150,6 +153,9 @@ def __init__(
self.conversion_factors = self._init_conversion_factors(
conversion_factors
)

self._check_flow_shares(input_flow_shares)
self._check_flow_shares(output_flow_shares)
self.input_flow_shares = self._init_flow_shares(input_flow_shares)
self.output_flow_shares = self._init_flow_shares(output_flow_shares)

Expand Down Expand Up @@ -181,27 +187,62 @@ def _init_conversion_factors(
conversion_factors[cf] = sequence(1)
return conversion_factors

@staticmethod
def _check_flow_shares(flow_shares):
    """Validate user-given flow shares before initialization.

    Two rules are enforced:
    1. Only the share types listed in ``FLOW_SHARE_TYPES`` ("min",
       "max", "fix") are accepted as keys.
    2. A node with a "fix" share must not additionally carry a "min"
       or "max" share — a fixed share already pins the value, so a
       bound on the same node would be inconsistent.

    Parameters
    ----------
    flow_shares : dict or None
        Mapping of share type to a dict of node -> share value.
        ``None`` means no shares were given and is accepted as-is.

    Raises
    ------
    ValueError
        If an unknown share type is present, or "fix" is combined
        with "min"/"max" for the same node.
    """
    if flow_shares is None:
        return

    # Reject unknown share type keys up front.
    unknown_types = set(flow_shares) - set(FLOW_SHARE_TYPES)
    if unknown_types:
        raise ValueError(
            f"Invalid flow share types found: {unknown_types}. "
            "Must be one of 'min', 'max' or 'fix'."
        )

    # A "fix" share excludes any additional bound on the same node.
    for node in flow_shares.get("fix", ()):
        for bound in ("min", "max"):
            if bound in flow_shares and node in flow_shares[bound]:
                raise ValueError(
                    f"Cannot combine 'fix' and '{bound}' flow share "
                    "for same node."
                )

@staticmethod
def _init_flow_shares(
    flow_shares: Dict[str, Dict[Bus, Union[float, Iterable]]]
) -> Dict[str, Dict[Bus, Iterable]]:
    """
    Init minimum, maximum and fix flow shares. Set up empty dict, if flow
    shares are not set. For each given flow share, turn value into sequence
    if necessary.

    Parameters
    ----------
    flow_shares : Dict[str, Dict[Bus, Union[float, Iterable]]]
        flow shares set up by the user, keyed first by share type
        ("min"/"max"/"fix"), then by node.

    Returns
    -------
    Dict[str, Dict[Bus, Iterable]]
        Flow shares as sequences
    """
    if flow_shares is None:
        return {}

    # Turn scalar or iterable share values into sequences so they can
    # be indexed per timestep later on.
    return {
        share_type: {
            node: sequence(value) for node, value in shares.items()
        }
        for share_type, shares in flow_shares.items()
    }

@staticmethod
def _init_group_dict(
Expand Down Expand Up @@ -368,7 +409,7 @@ def _output_group_relation(block, n, g, p, t):
rule=_output_group_relation,
)

self.input_output_relation = Constraint(
self.input_output_group_relation = Constraint(
[
(n, g, p, t)
for p, t in m.TIMEINDEX
Expand All @@ -385,7 +426,7 @@ def _input_output_group_relation(block):
for i, ii in zip(
list(n.input_groups)[:-1], list(n.input_groups)[1:]
):
block.input_output_relation.add(
block.input_output_group_relation.add(
(n, i, p, t),
(
block.INPUT_GROUP_FLOW[n, i, p, t]
Expand All @@ -396,7 +437,7 @@ def _input_output_group_relation(block):
for o, oo in zip(
list(n.output_groups)[:-1], list(n.output_groups)[1:]
):
block.input_output_relation.add(
block.input_output_group_relation.add(
(n, o, p, t),
(
block.OUTPUT_GROUP_FLOW[n, o, p, t]
Expand All @@ -407,45 +448,63 @@ def _input_output_group_relation(block):
# Use last input item as index
last_input = list(n.input_groups)[-1]
last_output = list(n.output_groups)[-1]
block.input_output_relation.add(
block.input_output_group_relation.add(
(n, last_input, p, t),
(
block.INPUT_GROUP_FLOW[n, last_input, p, t]
== block.OUTPUT_GROUP_FLOW[n, last_output, p, t]
),
)

self.input_flow_share_relation_build = BuildAction(
self.input_output_group_relation_build = BuildAction(
rule=_input_output_group_relation
)

def _get_operator_from_flow_share_type(flow_share_type):
if flow_share_type == "min":
return operator.gt
if flow_share_type == "max":
return operator.lt
if flow_share_type == "fix":
return operator.eq
raise ValueError(f"Unknown flow share type: {flow_share_type}")

self.input_flow_share_relation = Constraint(
[
(n, g, p, t)
(n, g, s, p, t)
for p, t in m.TIMEINDEX
for n in group
for g in n.input_groups
for s in FLOW_SHARE_TYPES
],
noruleinit=True,
)

def _input_flow_share_relation(block):
    """Add input flow share constraints for every node and timestep.

    For each share ``(type, node, value)`` the flow on the input
    node (normalized by its conversion factor) is related to the
    node's input-group flow scaled by the share value, using the
    comparison operator matching the share type (min/max/fix).
    """
    for p, t in m.TIMEINDEX:
        for n in group:
            for flow_share_type, shares in n.input_flow_shares.items():
                op = _get_operator_from_flow_share_type(
                    flow_share_type
                )
                for i, flow_share in shares.items():
                    # Find related input group for given input node:
                    g = next(
                        g
                        for g, inputs in n.input_groups.items()
                        if i in inputs
                    )
                    # Normalize the single input flow by its
                    # conversion factor before comparing to the
                    # group flow.
                    lhs = (
                        m.flow[i, n, p, t] / n.conversion_factors[i][t]
                    )
                    rhs = (
                        block.INPUT_GROUP_FLOW[n, g, p, t]
                        * flow_share[t]
                    )
                    block.input_flow_share_relation.add(
                        (n, g, flow_share_type, p, t),
                        op(lhs, rhs),
                    )

self.input_flow_share_relation_build = BuildAction(
rule=_input_flow_share_relation
Expand All @@ -457,27 +516,38 @@ def _input_flow_share_relation(block):
for p, t in m.TIMEINDEX
for n in group
for g in n.output_groups
for s in FLOW_SHARE_TYPES
],

Check failure

Code scanning / CodeQL

Suspicious unused loop iteration variable Error

For loop variable 's' is not used in the loop body.
noruleinit=True,
)

def _output_flow_share_relation(block):
    """Add output flow share constraints for every node and timestep.

    Mirrors ``_input_flow_share_relation`` for the output side: each
    output node's flow (normalized by its conversion factor) is
    related to its output-group flow scaled by the share value.
    """
    for p, t in m.TIMEINDEX:
        for n in group:
            for (
                flow_share_type,
                shares,
            ) in n.output_flow_shares.items():
                op = _get_operator_from_flow_share_type(
                    flow_share_type
                )
                for o, flow_share in shares.items():
                    # Find related output group for given output node:
                    g = next(
                        g
                        for g, outputs in n.output_groups.items()
                        if o in outputs
                    )
                    lhs = (
                        m.flow[n, o, p, t] / n.conversion_factors[o][t]
                    )
                    rhs = (
                        block.OUTPUT_GROUP_FLOW[n, g, p, t]
                        * flow_share[t]
                    )
                    # Bug fix: output shares were added to the *input*
                    # share constraint, whose index set only contains
                    # input groups — add them to the output constraint.
                    # NOTE(review): assumes the Constraint declared for
                    # this block is named `output_flow_share_relation`
                    # (matching `output_flow_share_relation_build`) —
                    # confirm against the collapsed declaration above.
                    block.output_flow_share_relation.add(
                        (n, g, flow_share_type, p, t), op(lhs, rhs)
                    )

self.output_flow_share_relation_build = BuildAction(
rule=_output_flow_share_relation
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
SPDX-License-Identifier: MIT
"""
import pandas as pd
import pytest

from oemof.solph import EnergySystem
from oemof.solph import Model
Expand All @@ -16,6 +17,49 @@
from oemof.solph.flows import Flow


def test_invalid_flow_shares():
    """Check that invalid or conflicting flow share setups raise errors.

    Covers: an unknown share type key, and combining a 'fix' share with
    a 'min' or 'max' share on the same node.
    """
    with pytest.raises(
        ValueError, match="Invalid flow share types found: {'maxx'}"
    ):
        # Labels fixed: buses were all labeled "gas" (copy-paste error).
        b_gas = Bus(label="gas")
        b_hydro = Bus(label="hydro")
        b_electricity = Bus(label="electricity")
        MultiInputMultiOutputConverter(
            label="mimo",
            inputs={"in": {b_gas: Flow(), b_hydro: Flow()}},
            outputs={b_electricity: Flow()},
            input_flow_shares={"maxx": {b_gas: 0.4}},
        )

    with pytest.raises(
        ValueError,
        match="Cannot combine 'fix' and 'min' flow share for same node.",
    ):
        b_gas = Bus(label="gas")
        b_hydro = Bus(label="hydro")
        b_electricity = Bus(label="electricity")
        MultiInputMultiOutputConverter(
            label="mimo",
            inputs={"in": {b_gas: Flow(), b_hydro: Flow()}},
            outputs={b_electricity: Flow()},
            input_flow_shares={"min": {b_gas: 0.4}, "fix": {b_gas: 0.4}},
        )

    with pytest.raises(
        ValueError,
        match="Cannot combine 'fix' and 'max' flow share for same node.",
    ):
        b_gas = Bus(label="gas")
        b_hydro = Bus(label="hydro")
        b_electricity = Bus(label="electricity")
        MultiInputMultiOutputConverter(
            label="mimo",
            inputs={"in": {b_gas: Flow(), b_hydro: Flow()}},
            outputs={b_electricity: Flow()},
            input_flow_shares={"max": {b_gas: 0.4}, "fix": {b_gas: 0.4}},
        )


def test_multiple_inputs():
idx = pd.date_range("1/1/2017", periods=2, freq="H")
es = EnergySystem(timeindex=idx)
Expand Down Expand Up @@ -123,7 +167,7 @@ def test_flow_shares():
inputs={"in": {b_gas: Flow(), b_hydro: Flow()}},
outputs={b_electricity: Flow(), b_heat: Flow()},
conversion_factors={b_gas: 1.2, b_hydro: 1.3},
input_flow_shares={b_gas: [0.8, 0.3]},
input_flow_shares={"fix": {b_gas: [0.8, 0.3]}},
)
)

Expand All @@ -136,11 +180,27 @@ def test_flow_shares():
# create result object
results = processing.convert_keys_to_strings(processing.results(om))

assert results[("gas", "mimo")]["sequences"]["flow"].values[0] == 100 * 0.8 * 1.2
assert results[("gas", "mimo")]["sequences"]["flow"].values[1] == 100 * 0.3 * 1.2
assert results[("hydro", "mimo")]["sequences"]["flow"].values[0] == 100 * 0.2 * 1.3
assert results[("hydro", "mimo")]["sequences"]["flow"].values[1] == 100 * 0.7 * 1.3
assert results[("mimo", "electricity")]["sequences"]["flow"].values[0] == 100
assert results[("mimo", "electricity")]["sequences"]["flow"].values[1] == 100
assert (
results[("gas", "mimo")]["sequences"]["flow"].values[0]
== 100 * 0.8 * 1.2
)
assert (
results[("gas", "mimo")]["sequences"]["flow"].values[1]
== 100 * 0.3 * 1.2
)
assert (
results[("hydro", "mimo")]["sequences"]["flow"].values[0]
== 100 * 0.2 * 1.3
)
assert (
results[("hydro", "mimo")]["sequences"]["flow"].values[1]
== 100 * 0.7 * 1.3
)
assert (
results[("mimo", "electricity")]["sequences"]["flow"].values[0] == 100
)
assert (
results[("mimo", "electricity")]["sequences"]["flow"].values[1] == 100
)
assert results[("mimo", "heat")]["sequences"]["flow"].values[0] == 100
assert results[("mimo", "heat")]["sequences"]["flow"].values[1] == 100

0 comments on commit 5ea6161

Please sign in to comment.