diff --git a/doc/helpers/generate_math.py b/doc/helpers/generate_math.py index dd0b5e6d5..39ac5e643 100644 --- a/doc/helpers/generate_math.py +++ b/doc/helpers/generate_math.py @@ -24,6 +24,26 @@ "supply_plus", ] +# TODO: find another way to define this +POSSIBLE_TIMESERIES_DATA = [ + "clustering_func", + "flow_eff", + "flow_ramping", + "export", + "om_con", + "om_prod", + "parasitic_eff", + "source_max", + "source_min", + "source_equals", + "sink_min", + "sink_max", + "sink_equals", + "source_eff", + "storage_loss", + "carrier_ratios", +] + def generate_base_math_model(model_config: dict) -> calliope.Model: """Generate RST file for the base math diff --git a/src/calliope/backend/backend_model.py b/src/calliope/backend/backend_model.py index 8cb86cee9..53667465b 100644 --- a/src/calliope/backend/backend_model.py +++ b/src/calliope/backend/backend_model.py @@ -765,7 +765,7 @@ def _drop_attrs(da): if expr.notnull().any() } - results = xr.Dataset({**all_variables, **all_global_expressions}) + results = xr.Dataset({**all_variables, **all_global_expressions}).astype(float) return results diff --git a/src/calliope/backend/helper_functions.py b/src/calliope/backend/helper_functions.py index eb0850d3b..3bccc91cc 100644 --- a/src/calliope/backend/helper_functions.py +++ b/src/calliope/backend/helper_functions.py @@ -28,8 +28,8 @@ def __init__( as_latex: bool = False, **kwargs, ) -> None: - """ - Abstract helper function class, which all helper functions must subclass. + """Abstract helper function class, which all helper functions must subclass. + The abstract properties and methods defined here must be defined by all helper functions. Args: @@ -52,15 +52,15 @@ def NAME(self) -> str: @abstractmethod def as_latex(self, *args, **kwargs) -> str: - """ - Method to update LaTeX math strings to include the action applied by the helper function. + """Method to update LaTeX math strings to include the action applied by the helper function. + This method is called when the class is initialised with ``as_latex=True``. """ @abstractmethod def as_array(self, *args, **kwargs) -> xr.DataArray: - """ - Method to apply the helper function to provide an n-dimensional array output. + """Method to apply the helper function to provide an n-dimensional array output. + This method is called when the class is initialised with ``as_latex=False``. """ @@ -75,8 +75,8 @@ def __call__(self, *args, **kwargs) -> Any: return self.as_array(*args, **kwargs) def __init_subclass__(cls): - """ - Override subclass definition in two ways: + """Override subclass definition in two ways: + 1. Do not allow new helper functions to have a name that is already defined (be it a built-in function or a custom function). 2. Wrap helper function __call__ in a check for the function being allowed in specific parsing string types. """ @@ -91,8 +91,7 @@ def __init_subclass__(cls): @staticmethod def _add_to_iterator(instring: str, iterator_converter: dict[str, str]) -> str: - """ - Utility function for generating latex strings in multiple helper functions. + """Utility function for generating latex strings in multiple helper functions. Find an iterator in the iterator substring of the component string (anything wrapped in `_text{}`). Other parts of the iterator substring can be anything @@ -123,8 +122,7 @@ def _replace_in_iterator(matched): @staticmethod def _instr(dim: str) -> str: - """ - Utility function for generating latex strings in multiple helper functions. + """Utility function for generating latex strings in multiple helper functions. 
Args: dim (str): Dimension suffixed with a "s" (e.g., "techs") @@ -135,6 +133,13 @@ def _instr(dim: str) -> str: dim_singular = dim.removesuffix("s") return rf"\text{{{dim_singular}}} \in \text{{{dim}}}" + @staticmethod + def _listify(val: Union[list[str], str]) -> list[str]: + if isinstance(val, list): + return val + else: + return [val] + class Inheritance(ParsingHelperFunction): #: @@ -146,8 +151,8 @@ def as_latex(self, tech_group: str) -> str: return rf"\text{{tech_group={tech_group}}}" def as_array(self, tech_group: str) -> xr.DataArray: - """ - Find all technologies which inherit from a particular technology group. + """Find all technologies which inherit from a particular technology group. + The technology group can be an abstract base group (e.g., `supply`, `storage`) or a user-defined technology group which itself inherits from one of the abstract base groups. Args: @@ -176,8 +181,7 @@ def as_latex(self, array: str, *, over: Union[str, list[str]]) -> str: return rf"\bigvee\limits_{{{overstring}}} ({array})" def as_array(self, parameter: str, *, over: Union[str, list[str]]) -> xr.DataArray: - """ - Reduce the boolean where array of a model parameter by applying `any` over some dimension(s). + """Reduce the boolean where array of a model parameter by applying `any` over some dimension(s). Args: parameter (str): Reference to a model input parameter @@ -199,13 +203,134 @@ def as_array(self, parameter: str, *, over: Union[str, list[str]]) -> xr.DataArr bool_parameter_da = self._kwargs["backend_dataset"][parameter].notnull() else: bool_parameter_da = xr.DataArray(False) - if not isinstance(over, list): - over = [over] + over = self._listify(over) available_dims = set(bool_parameter_da.dims).intersection(over) return bool_parameter_da.any(dim=available_dims, keep_attrs=True) +class Defined(ParsingHelperFunction): + #: + NAME = "defined" + #: + ALLOWED_IN = ["where"] + + def as_latex(self, *, within: str, how: Literal["all", "any"], **dims) -> str: + substrings = [] + for name, vals in dims.items(): + substrings.append(self._latex_substring(how, name, vals, within)) + if len(substrings) == 1: + return substrings[0] + else: + return rf"\bigwedge({', '.join(substrings)})" + + def as_array( + self, *, within: str, how: Literal["all", "any"], **dims: str + ) -> xr.DataArray: + """Find whether members of a model dimension are defined inside another. + + For instance, whether a node defines a specific tech (or group of techs). + Or, whether a tech defines a specific carrier. + + Args: + within (str): the model dimension to check. + how (Literal[all, any]): Whether to return True for `any` match of nested members or for `all` nested members. + + Kwargs: + dims (dict[str, str]): + **key**: dimension whose members will be searched for as being defined under the primary dimension (`within`). + **value**: subset of the dimension members to find. + `dims` must be one of the core model dimensions: [nodes, techs, carriers, carrier_tiers] + + Returns: + xr.DataArray: + For each member of `within`, True if any/all member(s) in `dims` is nested within that member. + + Examples: + Check for any of a list of techs being defined at nodes. 
+ Assuming a YAML definition of: + + ```yaml + nodes: + node1: + techs: + tech1: + tech3: + node2: + techs: + tech2: + tech3: + ``` + Then: + ``` + >>> defined(techs=[tech1, tech2], within=nodes, how=any) + [out] + array([ True, False]) + Coordinates: + * nodes (nodes) >> defined(techs=[tech1, tech2], within=nodes, how=all) + [out] + array([ False, False]) + Coordinates: + * nodes (nodes) set: + """From the definition matrix, get the dimensions that have not been defined. + + This includes dimensions not defined as keys of `dims` or as the value of `within`. + + Args: + dim_names (list[str]): Keys of `dims`. + within (str): dimension whose members are being checked. + + Raises: + ValueError: Can only define dimensions that exist in model.definition_matrix. + + Returns: + set: Undefined dimensions to remove from the definition matrix. + """ + definition_matrix = self._kwargs["model_data"].definition_matrix + missing_dims = set([*dim_names, within]).difference(definition_matrix.dims) + if missing_dims: + raise ValueError( + f"Unexpected model dimension referenced in `{self.NAME}` helper function. " + "Only dimensions given by `model.inputs.definition_matrix` can be used. " + f"Received: {missing_dims}" + ) + return set(definition_matrix.dims).difference([*dim_names, within]) + + def _latex_substring( + self, + how: Literal["all", "any"], + dim: str, + vals: Union[str, list[str]], + within: str, + ) -> str: + if how == "all": + # Using wedge for "collective-and" + tex_how = "wedge" + elif how == "any": + # Using vee for "collective-or" + tex_how = "vee" + + vals = self._listify(vals) + within_singular = within.removesuffix("s") + dim_singular = dim.removesuffix("s") + selection = rf"\text{{{dim_singular}}} \in \text{{[{','.join(vals)}]}}" + + return rf"\big{tex_how}\limits_{{\substack{{{selection}}}}}\text{{{dim_singular} defined in {within_singular}}}" + + class Sum(ParsingHelperFunction): #: NAME = "sum" @@ -223,8 +348,7 @@ def as_latex(self, array: str, *, over: Union[str, list[str]]) -> str: def as_array( self, array: xr.DataArray, *, over: Union[str, list[str]] ) -> xr.DataArray: - """ - Sum an expression array over the given dimension(s). + """Sum an expression array over the given dimension(s). Args: array (xr.DataArray): expression array @@ -268,9 +392,9 @@ def as_array( """ return Sum(as_latex=self._as_latex, **self._kwargs)( array.where( - self._kwargs["model_data"] - .carrier.sel(carrier_tiers=carrier_tier) - .notnull() + self._kwargs["model_data"].definition_matrix.sel( + carrier_tiers=carrier_tier + ) ), over="carriers", ) @@ -289,6 +413,7 @@ def as_array( self, array: xr.DataArray, carrier_tier: Literal["in", "out"] ) -> xr.DataArray: """Reduce expression array data by selecting the carrier that corresponds to the primary carrier and then dropping the `carriers` dimension. + This function is only valid for `conversion_plus` technologies, so should only be included in a math component if the `where` string includes `inheritance(conversion_plus)` or an equivalent expression. @@ -327,8 +452,7 @@ def as_latex(self, array: str, **lookup_arrays: str) -> str: def as_array( self, array: xr.DataArray, **lookup_arrays: xr.DataArray ) -> xr.DataArray: - """ - Apply vectorised indexing on an arbitrary number of an input array's dimensions. + """Apply vectorised indexing on an arbitrary number of an input array's dimensions. Args: array (xr.DataArray): Array on which to apply vectorised indexing. 
@@ -417,7 +541,8 @@ def as_latex(self, **dim_idx_mapping: str) -> str: def as_array(self, **dim_idx_mapping: int) -> xr.DataArray: """Get value of a model dimension at a given integer index. - This function is primarily useful for timeseries data + + This function is primarily useful for timeseries data. Keyword Args: key (str): Model dimension in which to extract value. @@ -478,8 +603,7 @@ def as_latex(self, array: str, **roll_kwargs: str) -> str: return component def as_array(self, array: xr.DataArray, **roll_kwargs: int) -> xr.DataArray: - """ - Roll (a.k.a., shift) the array along the given dimension(s) by the given number of places. + """Roll (a.k.a., shift) the array along the given dimension(s) by the given number of places. Rolling keeps the array index labels in the same position, but moves the data by the given number of places. Args: diff --git a/src/calliope/backend/parsing.py b/src/calliope/backend/parsing.py index a538cd8f4..11eb2063d 100644 --- a/src/calliope/backend/parsing.py +++ b/src/calliope/backend/parsing.py @@ -681,7 +681,9 @@ def extend_equation_list_with_expression_group( for parsed_item_combination in parsed_item_product ] - def combine_exists_and_foreach(self, model_data: xr.Dataset) -> xr.DataArray: + def combine_definition_matrix_and_foreach( + self, model_data: xr.Dataset + ) -> xr.DataArray: """Generate a multi-dimensional boolean array based on the sets over which the constraint is to be built (defined by "foreach") and the model `exists` array. The `exists` array is a boolean array defining the structure of the model and is True for valid combinations of technologies consuming/producing specific carriers at specific nodes. @@ -694,7 +696,7 @@ def combine_exists_and_foreach(self, model_data: xr.Dataset) -> xr.DataArray: xr.DataArray: boolean array indexed over ["nodes", "techs", "carriers", "carrier_tiers"] + any additional dimensions provided by `foreach`. """ # Start with (carriers, carrier_tiers, nodes, techs) and go from there - exists = model_data.carrier.notnull() * model_data.node_tech.notnull() + exists = model_data.definition_matrix # Add other dimensions (costs, timesteps, etc.) add_dims = set(self.sets).difference(exists.dims) if add_dims.difference(model_data.dims): @@ -729,7 +731,7 @@ def generate_top_level_where_array( Returns: xr.DataArray: Boolean array defining on which index items a parsed component should be built. 
""" - foreach_where = self.combine_exists_and_foreach(model_data) + foreach_where = self.combine_definition_matrix_and_foreach(model_data) if not foreach_where.any(): self.log_not_added("'foreach' does not apply anywhere.") diff --git a/src/calliope/core/model.py b/src/calliope/core/model.py index a20e94602..4a900b371 100644 --- a/src/calliope/core/model.py +++ b/src/calliope/core/model.py @@ -512,7 +512,7 @@ def info(self) -> str: model_name = self.name info_strings.append(f"Model name: {model_name}") msize = dict(self._model_data.dims) - msize_exists = (self._model_data.node_tech * self._model_data.carrier).sum() + msize_exists = self._model_data.definition_matrix.sum() info_strings.append( f"Model size: {msize} ({msize_exists.item()} valid node:tech:carrier:carrier_tier combinations)" ) diff --git a/src/calliope/math/base.yaml b/src/calliope/math/base.yaml index 5770329e2..c23569a10 100644 --- a/src/calliope/math/base.yaml +++ b/src/calliope/math/base.yaml @@ -65,14 +65,14 @@ constraints: flow_out_max_conversion_plus: description: "Set the upper bound in each timestep of a `conversion_plus` technology's total outflow on its `out` carrier flows." foreach: [nodes, techs, timesteps] - where: "inheritance(conversion_plus) AND NOT cap_method=integer" + where: "inheritance(conversion_plus) AND NOT operating_units" equations: - expression: reduce_carrier_dim(flow_out, carrier_tier=out) <= timestep_resolution * flow_cap flow_out_min_conversion_plus: description: "Set the lower bound in each timestep of a `conversion_plus` technology's total outflow on its `out` carrier flows." foreach: [nodes, techs, timesteps] - where: "flow_out_min_relative AND inheritance(conversion_plus) AND NOT cap_method=integer" + where: "flow_out_min_relative AND inheritance(conversion_plus) AND NOT operating_units" equations: - expression: reduce_carrier_dim(flow_out, carrier_tier=out) >= timestep_resolution * flow_cap * flow_out_min_relative @@ -121,21 +121,21 @@ constraints: flow_out_max: description: "Set the upper bound of a non-`conversion_plus` technology's outflow." foreach: [nodes, techs, carriers, timesteps] - where: "carrier AND NOT inheritance(conversion_plus) AND NOT cap_method=integer AND allowed_flow_out=True AND [out] in carrier_tiers" + where: "NOT inheritance(conversion_plus) AND NOT operating_units AND allowed_flow_out=True AND [out] in carrier_tiers" equations: - expression: flow_out <= flow_cap * timestep_resolution * parasitic_eff flow_out_min: description: "Set the lower bound of a non-`conversion_plus` technology's outflow." foreach: [nodes, techs, carriers, timesteps] - where: "carrier AND flow_out_min_relative AND NOT inheritance(conversion_plus) AND NOT cap_method=integer AND allowed_flow_out=True AND [out] in carrier_tiers" + where: "flow_out_min_relative AND NOT inheritance(conversion_plus) AND NOT operating_units AND allowed_flow_out=True AND [out] in carrier_tiers" equations: - expression: flow_out >= flow_cap * timestep_resolution * flow_out_min_relative flow_in_max: description: "Set the upper bound of a non-`conversion_plus` technology's inflow." 
foreach: [nodes, techs, carriers, timesteps] - where: "carrier AND (inheritance(transmission) OR inheritance(demand) OR inheritance(storage)) AND (NOT cap_method=integer OR inheritance(demand)) AND allowed_flow_in=True AND [in] in carrier_tiers" + where: "(inheritance(transmission) OR inheritance(demand) OR inheritance(storage)) AND (NOT operating_units OR inheritance(demand)) AND allowed_flow_in=True AND [in] in carrier_tiers" equations: - expression: flow_in <= flow_cap * timestep_resolution @@ -325,22 +325,22 @@ constraints: foreach: [nodes, techs, carriers, timesteps] where: "flow_export AND export_max" equations: - - where: "cap_method=integer" + - where: "operating_units" expression: flow_export <= export_max * operating_units - - where: "NOT cap_method=integer" + - where: "NOT operating_units" expression: flow_export <= export_max unit_commitment_milp: description: "Set the upper bound of the number of integer units of technology that can exist, for any technology using integer units to define its capacity." foreach: [nodes, techs, timesteps] - where: "cap_method=integer" + where: "operating_units" equations: - expression: operating_units <= units flow_out_max_milp: description: "Set the upper bound of a non-`conversion_plus` technology's ability to produce carriers, for any technology using integer units to define its capacity." foreach: [nodes, techs, carriers, timesteps] - where: "operating_units AND carrier AND NOT inheritance(conversion_plus) AND allowed_flow_out=True" + where: "operating_units AND NOT inheritance(conversion_plus) AND allowed_flow_out=True" equations: - expression: flow_out <= operating_units * timestep_resolution * flow_cap_per_unit * parasitic_eff @@ -361,7 +361,7 @@ constraints: flow_out_min_milp: description: "Set the lower bound of a non-`conversion_plus` technology's ability to produce carriers, for any technology using integer units to define its capacity." foreach: [nodes, techs, carriers, timesteps] - where: "operating_units AND carrier AND flow_out_min_relative AND NOT inheritance(conversion_plus) AND allowed_flow_out=True" + where: "operating_units AND flow_out_min_relative AND NOT inheritance(conversion_plus) AND allowed_flow_out=True" equations: - expression: flow_out >= operating_units * timestep_resolution * flow_cap_per_unit * flow_out_min_relative @@ -457,11 +457,11 @@ constraints: - expression: $flow - roll($flow, timesteps=1) <= flow_ramping * flow_cap sub_expressions: flow: &ramping_flow - - where: "carrier AND allowed_flow_out=True AND NOT allowed_flow_in=True" + - where: "allowed_flow_out=True AND NOT allowed_flow_in=True" expression: flow_out / timestep_resolution - - where: "carrier AND allowed_flow_in=True AND NOT allowed_flow_out=True" + - where: "allowed_flow_in=True AND NOT allowed_flow_out=True" expression: flow_in / timestep_resolution - - where: "carrier AND allowed_flow_in=True AND allowed_flow_out=True" + - where: "allowed_flow_in=True AND allowed_flow_out=True" expression: (flow_out - flow_in) / timestep_resolution ramping_down: @@ -486,7 +486,7 @@ variables: description: "The outflow of a technology per timestep, also known as the flow discharged (from `storage` technologies) or the flow received (by `transmission` technologies) on a link." 
unit: energy foreach: [nodes, techs, carriers, timesteps] - where: "carrier AND allowed_flow_out=True AND [out, out_2, out_3] in carrier_tiers" + where: "allowed_flow_out=True AND [out, out_2, out_3] in carrier_tiers" bounds: min: 0 max: .inf @@ -495,7 +495,7 @@ variables: description: "The inflow to a technology per timestep, also known as the flow consumed (by `storage` technologies) or the flow sent (by `transmission` technologies) on a link." unit: energy foreach: [nodes, techs, carriers, timesteps] - where: "carrier AND allowed_flow_in=True AND [in, in_2, in_3] in carrier_tiers" + where: "allowed_flow_in=True AND [in, in_2, in_3] in carrier_tiers" bounds: min: 0 max: .inf diff --git a/src/calliope/preprocess/model_data.py b/src/calliope/preprocess/model_data.py index ce4b261f3..b75e4e051 100644 --- a/src/calliope/preprocess/model_data.py +++ b/src/calliope/preprocess/model_data.py @@ -471,19 +471,35 @@ def _clean_unused_techs_nodes_and_carriers(self): """ Remove techs not assigned to nodes, nodes with no associated techs, and carriers associated with removed techs """ - for dim in ["nodes", "techs"]: + self.model_data["definition_matrix"] = ( + self.model_data.node_tech + self.model_data.carrier + ) + for dim in self.model_data["definition_matrix"].dims: + orig_dim_vals = set(self.model_data.coords[dim].data) self.model_data = self.model_data.dropna( - dim, how="all", subset=["node_tech"] + dim, how="all", subset=["definition_matrix"] + ) + deleted_dim_vals = orig_dim_vals.difference( + set(self.model_data.coords[dim].data) ) - for dim in ["carriers", "carrier_tiers"]: - self.model_data = self.model_data.dropna(dim, how="all") + if deleted_dim_vals: + LOGGER.debug( + f"Deleting {dim} values as they are not defined anywhere in the model: {deleted_dim_vals}" + ) + self.model_data[ + "definition_matrix" + ] = self.model_data.definition_matrix.notnull() + + vars_to_delete = [ + var_name + for var_name, var in self.model_data.data_vars.items() + if var.isnull().all() + ] + if vars_to_delete: + LOGGER.debug(f"Deleting empty parameters: {vars_to_delete}") self.model_data = self.model_data.drop_vars( - [ - var_name - for var_name, var in self.model_data.data_vars.items() - if var.isnull().all() - ] + vars_to_delete + ["node_tech", "carrier"] ) def _add_param_from_template(self): diff --git a/tests/conftest.py b/tests/conftest.py index 666adadeb..9ddc2f4ac 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -135,13 +135,9 @@ def dummy_model_data(): for dim in ALL_DIMS }, data_vars={ - "node_tech": ( - ["nodes", "techs"], - np.random.choice(a=[np.nan, True], p=[0.05, 0.95], size=(2, 4)), - ), - "carrier": ( - ["carrier_tiers", "carriers", "techs"], - np.random.choice(a=[np.nan, True], p=[0.05, 0.95], size=(2, 2, 4)), + "definition_matrix": ( + ["nodes", "techs", "carrier_tiers", "carriers"], + np.random.choice(a=[False, True], p=[0.05, 0.95], size=(2, 4, 2, 2)), ), "with_inf": ( ["nodes", "techs"], diff --git a/tests/test_backend_helper_functions.py b/tests/test_backend_helper_functions.py index 25d7c7c38..44dabd1e3 100644 --- a/tests/test_backend_helper_functions.py +++ b/tests/test_backend_helper_functions.py @@ -27,6 +27,11 @@ def where_any(where, parsing_kwargs): return where["any"](**parsing_kwargs) +@pytest.fixture(scope="class") +def where_defined(where, parsing_kwargs): + return where["defined"](**parsing_kwargs) + + @pytest.fixture(scope="class") def expression_sum(expression, parsing_kwargs): return expression["sum"](**parsing_kwargs) @@ -62,6 +67,28 @@ class TestAsArray: def 
parsing_kwargs(self, dummy_model_data): return {"model_data": dummy_model_data} + @pytest.fixture(scope="function") + def is_defined_any(self, dummy_model_data): + def _is_defined(drop_dims, dims): + return ( + dummy_model_data.definition_matrix.any(drop_dims) + .sel(**dims) + .any(dims.keys()) + ) + + return _is_defined + + @pytest.fixture(scope="function") + def is_defined_all(self, dummy_model_data): + def _is_defined(drop_dims, dims): + return ( + dummy_model_data.definition_matrix.any(drop_dims) + .sel(**dims) + .all(dims.keys()) + ) + + return _is_defined + @pytest.mark.parametrize( ["string_type", "func_name"], [("where", "inheritance"), ("expression", "sum")] ) @@ -110,6 +137,49 @@ def test_any_exists(self, where_any, dummy_model_data, var, over, expected): summed = where_any(var, over=over) assert summed.equals(dummy_model_data[expected]) + def test_defined_any_one_dim_one_val(self, is_defined_any, where_defined): + dims = {"techs": "foobar"} + dims_check = {"techs": ["foobar"]} + defined = where_defined(within="nodes", how="any", **dims) + assert defined.equals(is_defined_any(["carriers", "carrier_tiers"], dims_check)) + assert defined.dtype.kind == "b" + + def test_defined_any_two_dim_one_val(self, is_defined_any, where_defined): + dims = {"techs": "foobar", "carriers": "foo"} + dims_check = {"techs": ["foobar"], "carriers": ["foo"]} + defined = where_defined(within="nodes", how="any", **dims) + assert defined.equals(is_defined_any(["carrier_tiers"], dims_check)) + + def test_defined_any_one_dim_multi_val(self, is_defined_any, where_defined): + dims = {"techs": ["foobar", "foobaz"]} + defined = where_defined(within="nodes", how="any", **dims) + assert defined.equals(is_defined_any(["carriers", "carrier_tiers"], dims)) + assert defined.dtype.kind == "b" + + def test_defined_any_one_dim_multi_val_techs_within( + self, is_defined_any, where_defined + ): + dims = {"carriers": ["foo", "bar"]} + defined = where_defined(within="techs", how="any", **dims) + assert defined.equals(is_defined_any(["nodes", "carrier_tiers"], dims)) + + def test_defined_any_two_dim_multi_val(self, is_defined_any, where_defined): + dims = {"techs": ["foobar", "foobaz"], "carriers": ["foo", "bar"]} + defined = where_defined(within="nodes", how="any", **dims) + assert defined.equals(is_defined_any(["carrier_tiers"], dims)) + assert defined.dtype.kind == "b" + + def test_defined_all_one_dim_one_val(self, is_defined_all, where_defined): + dims = {"techs": ["foobar"]} + defined = where_defined(within="nodes", how="all", **dims) + assert defined.equals(is_defined_all(["carriers", "carrier_tiers"], dims)) + assert defined.dtype.kind == "b" + + def test_defined_all_two_dim_one_val(self, is_defined_all, where_defined): + dims = {"techs": ["foobar"], "carriers": ["foo"]} + defined = where_defined(within="nodes", how="all", **dims) + assert defined.equals(is_defined_all(["carrier_tiers"], dims)) + @pytest.mark.parametrize("over", ["techs", ["techs"]]) def test_sum_one_dim(self, expression_sum, dummy_model_data, over): summed_array = expression_sum(dummy_model_data.only_techs, over=over) @@ -134,8 +204,11 @@ def test_reduce_carrier_dim(self, expression_reduce_carrier_dim, dummy_model_dat dummy_model_data.all_true_carriers, "foo" ) - assert dummy_model_data.carrier.sel(carrier_tiers="foo").sum() == reduced.sum() - assert not set(reduced.dims).symmetric_difference(["techs"]) + assert ( + dummy_model_data.definition_matrix.sel(carrier_tiers="foo").sum() + == reduced.sum() + ) + assert not 
set(reduced.dims).symmetric_difference(["nodes", "techs"]) def test_reduce_primary_carrier_dim( self, expression_reduce_primary_carrier_dim, dummy_model_data @@ -244,6 +317,47 @@ def test_any_not_exists(self, where_any): summed_string = where_any("foo", over="techs") assert summed_string == r"\bigvee\limits_{\text{tech} \in \text{techs}} (foo)" + def test_defined_any(self, where_defined): + defined_string = where_defined(within="nodes", how="any", techs="foobar") + assert ( + defined_string + == r"\bigvee\limits_{\substack{\text{tech} \in \text{[foobar]}}}\text{tech defined in node}" + ) + + def test_defined_any_multi_val(self, where_defined): + defined_string = where_defined( + within="nodes", how="any", techs=["foobar", "foobaz"] + ) + assert ( + defined_string + == r"\bigvee\limits_{\substack{\text{tech} \in \text{[foobar,foobaz]}}}\text{tech defined in node}" + ) + + def test_defined_any_multi_dim(self, where_defined): + defined_string = where_defined( + within="nodes", how="any", techs="foobar", carriers="foo" + ) + assert ( + defined_string + == r"\bigwedge(\bigvee\limits_{\substack{\text{tech} \in \text{[foobar]}}}\text{tech defined in node}, \bigvee\limits_{\substack{\text{carrier} \in \text{[foo]}}}\text{carrier defined in node})" + ) + + def test_defined_all(self, where_defined): + defined_string = where_defined(within="nodes", how="all", techs="foobar") + assert ( + defined_string + == r"\bigwedge\limits_{\substack{\text{tech} \in \text{[foobar]}}}\text{tech defined in node}" + ) + + def test_defined_all_multi_dim(self, where_defined): + defined_string = where_defined( + within="nodes", how="all", techs="foobar", carriers="foo" + ) + assert ( + defined_string + == r"\bigwedge(\bigwedge\limits_{\substack{\text{tech} \in \text{[foobar]}}}\text{tech defined in node}, \bigwedge\limits_{\substack{\text{carrier} \in \text{[foo]}}}\text{carrier defined in node})" + ) + @pytest.mark.parametrize( ["over", "expected_substring"], [ diff --git a/tests/test_backend_parsing.py b/tests/test_backend_parsing.py index 0e19d7598..4639b11ac 100644 --- a/tests/test_backend_parsing.py +++ b/tests/test_backend_parsing.py @@ -33,7 +33,7 @@ def component_obj(): @pytest.fixture(scope="function") def exists_array(component_obj, dummy_model_data): component_obj.sets = ["nodes", "techs"] - return component_obj.combine_exists_and_foreach(dummy_model_data) + return component_obj.combine_definition_matrix_and_foreach(dummy_model_data) @pytest.fixture @@ -629,7 +629,7 @@ def test_combine_exists_and_foreach_all_permutations( self, dummy_model_data, component_obj, foreach ): component_obj.sets = foreach - where = component_obj.combine_exists_and_foreach(dummy_model_data) + where = component_obj.combine_definition_matrix_and_foreach(dummy_model_data) assert not BASE_DIMS.difference(where.dims) assert not set(foreach).difference(where.dims) @@ -637,7 +637,7 @@ def test_combine_exists_and_foreach_all_permutations( def test_foreach_unidentified_name(self, caplog, dummy_model_data, component_obj): component_obj.sets = ["nodes", "techs", "foos"] caplog.set_level(logging.DEBUG) - component_obj.combine_exists_and_foreach(dummy_model_data) + component_obj.combine_definition_matrix_and_foreach(dummy_model_data) assert "indexed over unidentified set names" in caplog.text def test_evaluate_where_to_false(self, dummy_model_data, component_obj): diff --git a/tests/test_backend_pyomo.py b/tests/test_backend_pyomo.py index da8bb9cd6..acef7f429 100755 --- a/tests/test_backend_pyomo.py +++ b/tests/test_backend_pyomo.py @@ -1988,7 
+1988,7 @@ def test_raise_error_on_constraint_with_nan(self, simple_supply): "equations": [ {"expression": "sum(flow_out, over=[nodes, timesteps]) >= 100"} ], - "where": "carrier AND allowed_flow_out=True AND [out, out_2, out_3] in carrier_tiers", # <- no error is raised because of this + "where": "allowed_flow_out=True AND [out, out_2, out_3] in carrier_tiers", # <- no error is raised because of this } constraint_name = "constraint-without-nan" @@ -2008,7 +2008,7 @@ def test_raise_error_on_constraint_with_nan(self, simple_supply): "equations": [ {"expression": "sum(flow_out, over=[nodes, timesteps]) >= 100"} ], - # "where": "carrier AND allowed_flow_out=True AND [out, out_2, out_3] in carrier_tiers", # <- no error would be raised with this uncommented + # "where": "allowed_flow_out=True AND [out, out_2, out_3] in carrier_tiers", # <- no error would be raised with this uncommented } constraint_name = "constraint-with-nan" @@ -2033,7 +2033,7 @@ def test_raise_error_on_expression_with_nan(self, simple_supply): expression_dict = { "foreach": ["techs", "carriers"], "equations": [{"expression": "sum(flow_out, over=[nodes, timesteps])"}], - "where": "carrier AND allowed_flow_out=True AND [out, out_2, out_3] in carrier_tiers", # <- no error is raised because of this + "where": "allowed_flow_out=True AND [out, out_2, out_3] in carrier_tiers", # <- no error is raised because of this } expression_name = "expression-without-nan" @@ -2051,7 +2051,7 @@ def test_raise_error_on_expression_with_nan(self, simple_supply): expression_dict = { "foreach": ["techs", "carriers"], "equations": [{"expression": "sum(flow_out, over=[nodes, timesteps])"}], - # "where": "carrier AND allowed_flow_out=True AND [out, out_2, out_3] in carrier_tiers", # <- no error would be raised with this uncommented + # "where": "allowed_flow_out=True AND [out, out_2, out_3] in carrier_tiers", # <- no error would be raised with this uncommented } expression_name = "expression-with-nan" diff --git a/tests/test_backend_where_parser.py b/tests/test_backend_where_parser.py index 0d74b395f..5cfedf691 100644 --- a/tests/test_backend_where_parser.py +++ b/tests/test_backend_where_parser.py @@ -157,12 +157,12 @@ def test_data_var_with_where_decision_variable_or_expr( ): """ Can't quite compare in the same way for decision variables / global expressions - as with params, because there is a random element to the `node_tech` initialisation array + as with params, because there is a random element to the `definition_matrix` array """ parsed_ = data_var.parse_string(data_var_string, parse_all=True) evaluated = parsed_[0].eval(**eval_kwargs) - # There's a chance that some values that *should* be True in evaluated are made False by a NaN value in `node_tech`, + # There's a chance that some values that *should* be True in evaluated are made False by a NaN value in `definition_matrix`, # #so we check that at least all the remaining True values match assert (evaluated & dummy_model_data[expected_similar]).equals(evaluated) diff --git a/tests/test_example_models.py b/tests/test_example_models.py index ccac2980f..8fabed9cb 100755 --- a/tests/test_example_models.py +++ b/tests/test_example_models.py @@ -521,6 +521,10 @@ def example_tester(self, source_unit, solver="cbc", solver_io=None): override_dict=unit_override, subset_time=["2005-07-01", "2005-07-01"] ) + solve_kwargs = {"solver": solver} + if solver_io: + solve_kwargs["solver_io"] = solver_io + solve_kwargs = {"solver": solver} if solver_io: solve_kwargs["solver_io"] = solver_io diff --git 
a/tests/test_model_data.py b/tests/test_model_data.py index 071b8d187..9de154f22 100644 --- a/tests/test_model_data.py +++ b/tests/test_model_data.py @@ -371,8 +371,13 @@ def test_clean_unused_techs_nodes_and_carriers(self, model_data): model_data._add_param_from_template() model_data._clean_unused_techs_nodes_and_carriers() model_data_new = model_data.model_data - for data_var in ["link_remote_techs", "link_remote_nodes", "node_tech"]: + for data_var in ["link_remote_techs", "link_remote_nodes"]: assert model_data_init[data_var].equals(model_data_new[data_var]) + assert "definition_matrix" not in model_data_init + assert "definition_matrix" in model_data_new + for data_var in ["node_tech", "carrier"]: + assert data_var not in model_data_new + assert model_data_new.definition_matrix.dtype.kind == "b" def test_add_time_dimension(self, model_data_w_params): assert not hasattr(model_data_w_params, "data_pre_time") diff --git a/tests/test_model_manipulation.py b/tests/test_model_manipulation.py index 518b81863..de332f2ce 100644 --- a/tests/test_model_manipulation.py +++ b/tests/test_model_manipulation.py @@ -45,10 +45,10 @@ def test_node_tech_active_false(self): model = build_model(overrides, "simple_storage,two_hours,investment_costs") # Ensure what should be gone is gone - assert ( - model._model_data.node_tech.sel(techs="test_storage", nodes="b") - .isnull() - .item() + assert not ( + model._model_data.definition_matrix.sel( + techs="test_storage", nodes="b" + ).any(["carriers", "carrier_tiers"]) ) def test_link_active_false(self): @@ -65,8 +65,5 @@ def test_link_tech_active_false(self): # Ensure what should be gone is gone assert "test_transmission_elec:b" not in model._model_data.techs assert "test_transmission_elec:a" not in model._model_data.techs - assert ( - model._model_data.node_tech.sel(nodes="a", techs="test_transmission_heat:b") - .notnull() - .item() - ) + assert "test_transmission_heat:b" in model._model_data.techs + assert "test_transmission_heat:a" in model._model_data.techs
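
Note on the `definition_matrix` refactor above: the separate NaN/True `node_tech` and `carrier` arrays are collapsed into a single boolean `definition_matrix` indexed over (nodes, techs, carriers, carrier_tiers), the `carrier AND ...` guards are dropped from where strings, and `cap_method=integer` checks are replaced by `operating_units`. The snippet below is a minimal standalone sketch of that equivalence using plain xarray and toy data; the array values, coordinate labels, and the final reduction are illustrative only (the reduction mirrors the `is_defined_any` test fixture above, not necessarily the `defined` helper's exact implementation), and it is not part of this patch.

```python
import numpy as np
import xarray as xr

# Toy stand-ins for the old NaN/True arrays (hypothetical data, for illustration only).
node_tech = xr.DataArray(
    [[1.0, np.nan], [np.nan, 1.0]],
    dims=("nodes", "techs"),
    coords={"nodes": ["node1", "node2"], "techs": ["tech1", "tech2"]},
)
carrier = xr.DataArray(
    [[[1.0, 1.0]]],
    dims=("carrier_tiers", "carriers", "techs"),
    coords={"carrier_tiers": ["out"], "carriers": ["power"], "techs": ["tech1", "tech2"]},
)

# What _clean_unused_techs_nodes_and_carriers now builds: broadcasting the two arrays
# keeps NaN wherever either is undefined; notnull() yields a pure boolean matrix.
definition_matrix = (node_tech + carrier).notnull()

# A where string such as `defined(techs=[tech1, tech2], within=nodes, how=any)`
# reduces this matrix per node, roughly as the test fixture does:
any_defined = (
    definition_matrix.any(["carriers", "carrier_tiers"])
    .sel(techs=["tech1", "tech2"])
    .any("techs")
)
print(any_defined.values)  # -> [ True  True] for this toy data
```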