Skip to content

Commit

Permalink
Allow missing data as single nested param key
Browse files Browse the repository at this point in the history
  • Loading branch information
brynpickering committed Oct 26, 2023
1 parent 4766cf3 commit 9251145
Show file tree
Hide file tree
Showing 6 changed files with 80 additions and 48 deletions.
8 changes: 6 additions & 2 deletions src/calliope/config/defaults.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -41,8 +41,12 @@ config:
zero_threshold: 1e-10 # On postprocessing the optimisation results, values smaller than this threshold will be considered as optimisation artefacts and will be set to zero.

parameters:
bigM: # Large value used to define certain optimisation problems. See https://en.wikipedia.org/wiki/Big_M_method for more information. This value should be larger than the largest values that any decision variables can take, but should not be **too** large (i.e., do not set it greater than 3 orders of magnitude above the numeric range of the model). If too large, numerical problems may arise in the optimisation.
data: 1e9
# BigM is a large value used to define certain optimisation problems.
# See https://en.wikipedia.org/wiki/Big_M_method for more information.
# This value should be larger than the largest values that any decision variables can take,
# but should not be **too** large (i.e., do not set it greater than 3 orders of magnitude above the numeric range of the model).
# If too large, numerical problems may arise in the optimisation.
bigM: 1e9
objective_cost_class: # Weightings for cost classes to apply in the objective function.
data: 1
index: [monetary]
Expand Down
3 changes: 1 addition & 2 deletions src/calliope/example_models/national_scale/model.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -27,5 +27,4 @@ parameters:
index: [monetary]
dims: costs

bigM:
data: 1e6 # Sets the scale of unmet demand, which cannot be too high, otherwise the optimisation will not converge
bigM: 1e6 # Sets the scale of unmet demand, which cannot be too high, otherwise the optimisation will not converge
3 changes: 1 addition & 2 deletions src/calliope/example_models/urban_scale/model.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,5 +24,4 @@ parameters:
data: 1
index: [monetary]
dims: costs
bigM:
data: 1e6 # setting the scale of unmet demand, which cannot be too high, otherwise the optimisation will not converge
bigM: 1e6 # setting the scale of unmet demand, which cannot be too high, otherwise the optimisation will not converge
78 changes: 51 additions & 27 deletions src/calliope/preprocess/model_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,10 @@ def _add_top_level_params(self):
raise KeyError(
f"Trying to add top-level parameter with same name as a node/tech level parameter: {param_name}"
)

if not isinstance(param_data, dict):
param_data = {"data": param_data}

if "dims" in param_data:
index = param_data.get("index", None)
if index is None or not isinstance(index, list):
Expand All @@ -127,36 +131,56 @@ def _add_top_level_params(self):
else:
param_da = xr.DataArray(param_data["data"], name=param_name)

coords_to_update = {}
for coord_name, coord_data in param_da.coords.items():
if (
self.model_data.coords.get(coord_name, xr.DataArray()).dtype.kind
== "M"
):
LOGGER.debug(
f"(parameters, {param_name}) | Updating {coord_name} dimension index values to datetime format"
)
coords_to_update[coord_name] = pd.to_datetime(
coord_data, format="ISO8601"
)
for coord_name, coord_data in coords_to_update.items():
param_da.coords[coord_name] = coord_data
self._update_param_coords(param_name, param_da)
self._log_param_updates(param_name, param_da)

for coord_name, coord_data in param_da.coords.items():
if coord_name not in self.model_data.coords:
self.model_data = self.model_data.merge(param_da.to_dataset())

def _update_param_coords(self, param_name: str, param_da: xr.DataArray) -> None:
    """
    Cast parameter coordinate index values to datetime format wherever the
    corresponding base-model coordinate is already datetime-typed (dtype kind "M").

    Args:
        param_name (str): name of parameter being added to the model.
        param_da (xr.DataArray): array of parameter data; its coords are
            updated in place.
    """
    # First pass: collect the conversions without mutating the coords we
    # are iterating over.
    converted: dict = {}
    for name, values in param_da.coords.items():
        base_coord = self.model_data.coords.get(name, xr.DataArray())
        if base_coord.dtype.kind != "M":
            continue
        LOGGER.debug(
            f"(parameters, {param_name}) | Updating {name} dimension index values to datetime format"
        )
        converted[name] = pd.to_datetime(values, format="ISO8601")
    # Second pass: apply the datetime-converted index values.
    for name, values in converted.items():
        param_da.coords[name] = values

def _log_param_updates(self, param_name: str, param_da: xr.DataArray) -> None:
"""
Check array coordinates to see if:
1. any are new compared to the base model dimensions.
2. any are adding new elements to an existing base model dimension.
Args:
param_name (str): name of parameter being added to the model.
param_da (xr.DataArray): array of parameter data.
"""
for coord_name, coord_data in param_da.coords.items():
if coord_name not in self.model_data.coords:
LOGGER.debug(
f"(parameters, {param_name}) | Adding a new dimension to the model: {coord_name}"
)
else:
new_coord_data = coord_data[
~coord_data.isin(self.model_data.coords[coord_name])
]
if new_coord_data.size > 0:
LOGGER.debug(
f"(parameters, {param_name}) | Adding a new dimension to the model: {coord_name}"
f"(parameters, {param_name}) | Adding a new value to the "
f"`{coord_name}` model coordinate: {new_coord_data.values}"
)
else:
new_coord_data = coord_data[
~coord_data.isin(self.model_data.coords[coord_name])
]
if new_coord_data.size > 0:
LOGGER.debug(
f"(parameters, {param_name}) | Adding a new value to the "
f"`{coord_name}` model coordinate: {new_coord_data.values}"
)
self.model_data = self.model_data.merge(param_da.to_dataset())

def _add_time_dimension(self):
self.data_pre_time = self.model_data.copy(deep=True)
Expand Down
2 changes: 1 addition & 1 deletion tests/test_constraint_results.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ def _run_model(feasibility, cap_val):
"2005-01-01 06:00:00",
"2005-01-01 08:00:00",
],
"parameters.bigM.data": 1e3,
"parameters.bigM": 1e3,
# Allow setting resource and flow_cap_max/equals to force infeasibility
"techs.test_supply_elec.constraints": {
"source_equals": cap_val,
Expand Down
34 changes: 20 additions & 14 deletions tests/test_model_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,12 @@ def model_run():
return model_run_from_yaml(filepath.as_posix(), scenario="simple_supply")[0]


class TestModelData:
@pytest.fixture(scope="function")
def model_data(self, model_run):
return ModelDataFactory(model_run)
@pytest.fixture(scope="function")
def model_data(model_run):
return ModelDataFactory(model_run)


class TestModelData:
@pytest.fixture(scope="class")
def model_data_w_params(self, model_run):
model_data = ModelDataFactory(model_run)
Expand Down Expand Up @@ -419,15 +420,16 @@ def test_add_attributes(self, model_data_w_params):

class TestTopLevelParams:
@pytest.fixture(scope="function")
def run_and_test(self):
def run_and_test(self, model_data):
def _run_and_test(in_dict, out_dict, dims):
model = build_model(
{"parameters.my_val": in_dict},
"simple_supply,two_hours",
)
model_data._extract_node_tech_data()
model_data._add_time_dimension()
model_data.params = {"my_val": in_dict}
model_data._add_top_level_params()

_data = pd.Series(out_dict).rename_axis(index=dims)
pd.testing.assert_series_equal(
model.inputs.my_val.to_series().dropna().reindex(_data.index),
model_data.model_data.my_val.to_series().dropna().reindex(_data.index),
_data,
check_dtype=False,
check_names=False,
Expand All @@ -448,17 +450,21 @@ def test_protected_parameter_names(self):
)

@pytest.mark.parametrize("val", [1, 1.0, np.inf, "foo"])
def test_top_level_param_single_val(self, val):
@pytest.mark.parametrize("dict_nesting", ["", ".data"])
def test_top_level_param_single_val(self, val, dict_nesting):
model = build_model(
{"parameters.my_val.data": val},
{f"parameters.my_val{dict_nesting}": val},
"simple_supply,two_hours",
)
assert model.inputs.my_val == xr.DataArray(val)

@pytest.mark.parametrize("val", [None, np.nan])
def test_top_level_param_single_val_cleaned_out_in_preprocessing(self, val):
@pytest.mark.parametrize("dict_nesting", ["", ".data"])
def test_top_level_param_single_val_cleaned_out_in_preprocessing(
self, val, dict_nesting
):
model = build_model(
{"parameters.my_val.data": val},
{f"parameters.my_val{dict_nesting}": val},
"simple_supply,two_hours",
)
assert "my_val" not in model.inputs
Expand Down

0 comments on commit 9251145

Please sign in to comment.