From fa59017b69d00f2f545ca3aaeb512e89ccdbbdaa Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 2 Oct 2024 17:10:56 +0100 Subject: [PATCH 01/92] Add comments and simplify sector.next --- src/muse/mca.py | 16 +++++------ src/muse/sectors/sector.py | 59 +++++++++++--------------------------- 2 files changed, 25 insertions(+), 50 deletions(-) diff --git a/src/muse/mca.py b/src/muse/mca.py index 55014c705..7fa2cd481 100644 --- a/src/muse/mca.py +++ b/src/muse/mca.py @@ -310,6 +310,7 @@ def run(self) -> None: ) self.carbon_price = future_propagation(self.carbon_price, future_price) + # Solve the market _, new_market, self.sectors = self.find_equilibrium(new_market) # Save sector outputs @@ -324,17 +325,19 @@ def run(self) -> None: new_market, year_idx ) + # Update the market dims = {i: new_market[i] for i in new_market.dims} self.market.supply.loc[dims] = new_market.supply self.market.consumption.loc[dims] = new_market.consumption - dims = {i: new_market[i] for i in new_market.prices.dims if i != "year"} self.market.prices.loc[dims] = future_propagation( self.market.prices.sel(dims), new_market.prices.sel(year=years[1]) ) + # Global outputs self.outputs(self.market, self.sectors, year=self.time_framework[year_idx]) # type: ignore self.outputs_cache.consolidate_cache(year=self.time_framework[year_idx]) + getLogger(__name__).info( f"Finish simulation year {years[0]} ({year_idx+1}/{nyear})!" 
) @@ -429,29 +432,26 @@ def single_year_iteration( if "updated_prices" not in market.data_vars: market["updated_prices"] = drop_timeslice(market.prices.copy()) - # eventually, the first market should be one that creates the initial demand for sector in sectors: + # Solve the sector sector_market = sector.next( market[["supply", "consumption", "prices"]] # type:ignore ) - sector_market = sector_market.sel(year=market.year) + # Calculate net consumption dims = {i: sector_market[i] for i in sector_market.consumption.dims} - sector_market.consumption.loc[dims] = ( sector_market.consumption.loc[dims] - sector_market.supply.loc[dims] ).clip(min=0.0, max=None) + # Update market supply and consumption market.consumption.loc[dims] += sector_market.consumption - dims = {i: sector_market[i] for i in sector_market.supply.dims} market.supply.loc[dims] += sector_market.supply + # Update market prices costs = sector_market.costs.sel(commodity=is_enduse(sector_market.comm_usage)) - - # do not write costs lower than 1e-4 - # should correspond to rounding value if len(costs.commodity) > 0: costs = costs.where(costs > 1e-4, 0) dims = {i: costs[i] for i in costs.dims} diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index b25a7455e..0a6328ed2 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -26,7 +26,6 @@ def factory(cls, name: str, settings: Any) -> Sector: from muse.interactions import factory as interaction_factory from muse.outputs.sector import factory as ofactory from muse.production import factory as pfactory - from muse.readers import read_timeslices from muse.readers.toml import read_technodata from muse.utilities import nametuple_to_dict @@ -34,10 +33,6 @@ def factory(cls, name: str, settings: Any) -> Sector: for attribute in ("name", "type", "priority", "path"): sector_settings.pop(attribute, None) - timeslices = read_timeslices( - sector_settings.pop("timeslice_levels", None) - ).get_index("timeslice") - technologies = 
read_technodata(settings, name, settings.time_framework) if "subsectors" not in sector_settings: @@ -81,7 +76,6 @@ def factory(cls, name: str, settings: Any) -> Sector: name, technologies, subsectors=subsectors, - timeslices=timeslices, supply_prod=supply, outputs=outputs, interactions=interactions, @@ -93,8 +87,6 @@ def __init__( name: str, technologies: xr.Dataset, subsectors: Sequence[Subsector] = [], - timeslices: pd.MultiIndex | None = None, - technodata_timeslices: xr.Dataset = None, interactions: Callable[[Sequence[AbstractAgent]], None] | None = None, interpolation: str = "linear", outputs: Callable | None = None, @@ -110,11 +102,6 @@ def __init__( """Subsectors controlled by this object.""" self.technologies: xr.Dataset = technologies """Parameters describing the sector's technologies.""" - self.timeslices: pd.MultiIndex | None = timeslices - """Timeslice at which this sector operates. - - If None, it will operate using the timeslice of the input market. - """ self.interpolation: Mapping[str, Any] = { "method": interpolation, "kwargs": {"fill_value": "extrapolate"}, @@ -201,41 +188,25 @@ def group_assets(x: xr.DataArray) -> xr.DataArray: current_year = int(mca_market.year.min()) getLogger(__name__).info(f"Running {self.name} for year {current_year}") - # > to sector timeslice - market = self.convert_market_timeslice( - mca_market.sel( - commodity=self.technologies.commodity, region=self.technologies.region - ).interp( - year=sorted( - { - current_year, - current_year + time_period, - current_year + self.forecast, - } - ), - **self.interpolation, - ), - self.timeslices, - ) - # > agent interactions + # Agent interactions self.interactions(list(self.agents)) - # > investment - years = sorted( - set( - market.year.data.tolist() - + self.capacity.installed.data.tolist() - + self.technologies.year.data.tolist() - ) + + # Select appropriate data from the market + market = mca_market.sel( + commodity=self.technologies.commodity, region=self.technologies.region ) 
- technologies = self.technologies.interp(year=years, **self.interpolation) + # Investments for subsector in self.subsectors: subsector.invest( - technologies, market, time_period=time_period, current_year=current_year + self.technologies, + market, + time_period=time_period, + current_year=current_year, ) # Full output data - supply, consume, costs = self.market_variables(market, technologies) + supply, consume, costs = self.market_variables(market, self.technologies) self.output_data = xr.Dataset( dict( supply=supply, @@ -287,7 +258,9 @@ def group_assets(x: xr.DataArray) -> xr.DataArray: dict(supply=supply, consumption=consumption, costs=costs) ) result = self.convert_market_timeslice(result, mca_market.timeslice) - result["comm_usage"] = technologies.comm_usage.sel(commodity=result.commodity) + result["comm_usage"] = self.technologies.comm_usage.sel( + commodity=result.commodity + ) result.set_coords("comm_usage") return result @@ -306,15 +279,17 @@ def market_variables(self, market: xr.Dataset, technologies: xr.Dataset) -> Any: years = market.year.values capacity = self.capacity.interp(year=years, **self.interpolation) + # Calculate supply supply = self.supply_prod( market=market, capacity=capacity, technologies=technologies ) - if "timeslice" in market.prices.dims and "timeslice" not in supply.dims: supply = convert_timeslice(supply, market.timeslice, QuantityType.EXTENSIVE) + # Calculate consumption consume = consumption(technologies, supply, market.prices) + # Calculate commodity prices technodata = cast(xr.Dataset, broadcast_techs(technologies, supply)) costs = supply_cost( supply.where(~is_pollutant(supply.comm_usage), 0), From 697ff3f30a2f262a6f1cac39c0de7853298e7b54 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 3 Oct 2024 11:27:03 +0100 Subject: [PATCH 02/92] Simplify agent module, more comments --- src/muse/agents/agent.py | 82 +++++++++++++----------------- src/muse/sectors/sector.py | 15 ++++-- src/muse/sectors/subsector.py | 93 
++++++++++++++++++----------------- 3 files changed, 94 insertions(+), 96 deletions(-) diff --git a/src/muse/agents/agent.py b/src/muse/agents/agent.py index fb34dffb4..a6d45e304 100644 --- a/src/muse/agents/agent.py +++ b/src/muse/agents/agent.py @@ -245,40 +245,13 @@ def asset_housekeeping(self): # state. self.assets = self._housekeeping(self, self.assets) - def next( + def compute_decision( self, technologies: xr.Dataset, market: xr.Dataset, demand: xr.DataArray, - time_period: int = 1, - ) -> Optional[xr.Dataset]: - """Iterates agent one turn. - - The goal is to figure out from market variables which technologies to - invest in and by how much. - - This function will modify `self.assets` and increment `self.year`. - Other attributes are left unchanged. Arguments to the function are - never modified. - """ - from logging import getLogger - - # dataset with intermediate computational results from search - # makes it easier to pass intermediate results to functions, as well as - # filter them when inside a function - if demand.size == 0 or demand.sum() < 1e-12: - self.year += time_period - return None - - search_space = ( - self.search_rules(self, demand, technologies, market).fillna(0).astype(int) - ) - - if any(u == 0 for u in search_space.shape): - getLogger(__name__).critical("Search space is empty") - self.year += time_period - return None - + search_space, + ): # Filter technologies according to the search space, forecast year and region techs = self.filter_input( technologies, @@ -297,23 +270,12 @@ def next( # Filter prices according to the region prices = self.filter_input(market.prices) - # Compute the objective - decision = self._compute_objective( + # Compute the objectives + objectives = self.objectives( technologies=techs, demand=reduced_demand, prices=prices ) - self.year += time_period - return xr.Dataset(dict(search_space=search_space, decision=decision)) - - def _compute_objective( - self, - technologies: xr.Dataset, - demand: xr.DataArray, - 
prices: xr.DataArray, - ) -> xr.DataArray: - objectives = self.objectives( - technologies=technologies, demand=demand, prices=prices - ) + # Compute the decision metric decision = self.decision(objectives) return decision @@ -433,20 +395,42 @@ def next( Other attributes are left unchanged. Arguments to the function are never modified. """ + from logging import getLogger + current_year = self.year - search = super().next(technologies, market, demand, time_period=time_period) - if search is None: + + # Skip forward if demand is zero + if demand.size == 0 or demand.sum() < 1e-12: + self.year += time_period return None + # Calculate the search space + search_space = ( + self.search_rules(self, demand, technologies, market).fillna(0).astype(int) + ) + + # Skip forward if the search space is empty + if any(u == 0 for u in search_space.shape): + getLogger(__name__).critical("Search space is empty") + self.year += time_period + return None + + # Calculate the decision metric + decision = self.compute_decision(technologies, market, demand, search_space) + search = xr.Dataset(dict(search_space=search_space, decision=decision)) if "timeslice" in search.dims: search["demand"] = drop_timeslice(demand) else: search["demand"] = demand + + # Filter assets with demand not_assets = [u for u in search.demand.dims if u != "asset"] condtechs = ( search.demand.sum(not_assets) > getattr(self, "tolerance", 1e-8) ).values search = search.sel(asset=condtechs) + + # Calculate constraints constraints = self.constraints( search.demand, self.assets, @@ -456,6 +440,7 @@ def next( year=current_year, ) + # Calculate investments investments = self.invest( search[["search_space", "decision"]], technologies, @@ -463,9 +448,12 @@ def next( year=current_year, ) + # Add investments self.add_investments( technologies, investments, - current_year=self.year - time_period, + current_year=current_year, time_period=time_period, ) + + self.year += time_period diff --git a/src/muse/sectors/sector.py 
b/src/muse/sectors/sector.py index 0a6328ed2..1dc42dd52 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -29,16 +29,19 @@ def factory(cls, name: str, settings: Any) -> Sector: from muse.readers.toml import read_technodata from muse.utilities import nametuple_to_dict + # Read sector settings sector_settings = getattr(settings.sectors, name)._asdict() for attribute in ("name", "type", "priority", "path"): sector_settings.pop(attribute, None) - - technologies = read_technodata(settings, name, settings.time_framework) - if "subsectors" not in sector_settings: raise RuntimeError(f"Missing 'subsectors' section in sector {name}") if len(sector_settings["subsectors"]._asdict()) == 0: raise RuntimeError(f"Empty 'subsectors' section in sector {name}") + + # Read technologies + technologies = read_technodata(settings, name, settings.time_framework) + + # Create subsectors subsectors = [ Subsector.factory( subsec_settings, @@ -51,14 +54,18 @@ def factory(cls, name: str, settings: Any) -> Sector: ._asdict() .items() ] + + # Check that subsector commodities are disjoint are_disjoint_commodities = sum(len(s.commodities) for s in subsectors) == len( set().union(*(set(s.commodities) for s in subsectors)) # type: ignore ) if not are_disjoint_commodities: raise RuntimeError("Subsector commodities are not disjoint") + # Create outputs outputs = ofactory(*sector_settings.pop("outputs", []), sector_name=name) + # supply_args = sector_settings.pop( "supply", sector_settings.pop("dispatch_production", {}) ) @@ -68,8 +75,10 @@ def factory(cls, name: str, settings: Any) -> Sector: supply_args = nametuple_to_dict(supply_args) supply = pfactory(**supply_args) + # Create interactions interactions = interaction_factory(sector_settings.pop("interactions", None)) + # Create sector for attr in ("technodata", "commodities_out", "commodities_in"): sector_settings.pop(attr, None) return cls( diff --git a/src/muse/sectors/subsector.py b/src/muse/sectors/subsector.py index 
26ba1cb27..d37045179 100644 --- a/src/muse/sectors/subsector.py +++ b/src/muse/sectors/subsector.py @@ -1,10 +1,9 @@ from __future__ import annotations -from collections.abc import Hashable, MutableMapping, Sequence +from collections.abc import Sequence from typing import ( Any, Callable, - cast, ) import numpy as np @@ -51,35 +50,35 @@ def invest( self, technologies: xr.Dataset, market: xr.Dataset, - time_period: int = 5, - current_year: int | None = None, + time_period: int, + current_year: int, ) -> None: - if current_year is None: - current_year = market.year.min() + # Expand prices to include destination region (for trade models) if self.expand_market_prices: market = market.copy() market["prices"] = drop_timeslice( np.maximum(market.prices, market.prices.rename(region="dst_region")) ) + # Agent housekeeping for agent in self.agents: agent.asset_housekeeping() - lp_problem = self.aggregate_lp( - technologies, market, time_period, current_year=current_year - ) - if lp_problem is None: - return + # Perform the investment + self.aggregate_lp(technologies, market, time_period, current_year=current_year) + # if lp_problem is None: + # return - years = technologies.year - techs = technologies.interp(year=years) - techs = techs.sel(year=current_year + time_period) + # # If there is a problem with the LP... 
+ # years = technologies.year + # techs = technologies.interp(year=years) + # techs = techs.sel(year=current_year + time_period) - solution = self.investment( - search=lp_problem[0], technologies=techs, constraints=lp_problem[1] - ) + # solution = self.investment( + # search=lp_problem[0], technologies=techs, constraints=lp_problem[1] + # ) - self.assign_back_to_agents(technologies, solution, current_year, time_period) + # self.assign_back_to_agents(technologies, solution, current_year, time_period) def assign_back_to_agents( self, @@ -99,14 +98,12 @@ def aggregate_lp( self, technologies: xr.Dataset, market: xr.Dataset, - time_period: int = 5, - current_year: int | None = None, - ) -> tuple[xr.Dataset, Sequence[xr.Dataset]] | None: + time_period, + current_year, + ): from muse.utilities import agent_concatenation, reduce_assets - if current_year is None: - current_year = market.year.min() - + # Split demand across agents demands = self.demand_share( self.agents, market, @@ -122,42 +119,46 @@ def aggregate_lp( dimension. 
""" raise ValueError(msg) - agent_market = market.copy() + + # Concatenate assets assets = agent_concatenation( {agent.uuid: agent.assets for agent in self.agents} ) + + # Calculate existing capacity + agent_market = market.copy() agent_market["capacity"] = ( reduce_assets(assets.capacity, coords=("region", "technology")) .interp(year=market.year, method="linear", kwargs={"fill_value": 0.0}) .swap_dims(dict(asset="technology")) ) - agent_lps: MutableMapping[Hashable, xr.Dataset] = {} + # agent_lps: MutableMapping[Hashable, xr.Dataset] = {} for agent in self.agents: if "agent" in demands.coords: share = demands.sel(asset=demands.agent == agent.uuid) else: share = demands - result = agent.next( - technologies, agent_market, share, time_period=time_period - ) - if result is not None: - agent_lps[agent.uuid] = result - - if len(agent_lps) == 0: - return None - - lps = cast(xr.Dataset, agent_concatenation(agent_lps, dim="agent")) - coords = {"agent", "technology", "region"}.intersection(assets.asset.coords) - constraints = self.constraints( - demand=demands, - assets=reduce_assets(assets, coords=coords).set_coords(coords), - search_space=lps.search_space, - market=market, - technologies=technologies, - year=current_year, - ) - return lps, constraints + + # Compute investments for the agent + agent.next(technologies, agent_market, share, time_period=time_period) + # if result is not None: + # agent_lps[agent.uuid] = result + + # if len(agent_lps) == 0: + # return None + + # lps = cast(xr.Dataset, agent_concatenation(agent_lps, dim="agent")) + # coords = {"agent", "technology", "region"}.intersection(assets.asset.coords) + # constraints = self.constraints( + # demand=demands, + # assets=reduce_assets(assets, coords=coords).set_coords(coords), + # search_space=lps.search_space, + # market=market, + # technologies=technologies, + # year=current_year, + # ) + # return lps, constraints @classmethod def factory( From b47c811957c92548257c107108229a969c8fd477 Mon Sep 17 00:00:00 
2001 From: Tom Bland Date: Thu, 3 Oct 2024 14:15:51 +0100 Subject: [PATCH 03/92] Simplify retirment profile code --- src/muse/agents/agent.py | 166 ++++++++++++++++++---------------- src/muse/investments.py | 35 +++---- src/muse/sectors/subsector.py | 36 +------- tests/test_investments.py | 2 +- 4 files changed, 103 insertions(+), 136 deletions(-) diff --git a/src/muse/agents/agent.py b/src/muse/agents/agent.py index a6d45e304..8efdd607e 100644 --- a/src/muse/agents/agent.py +++ b/src/muse/agents/agent.py @@ -80,14 +80,9 @@ def next( technologies: xr.Dataset, market: xr.Dataset, demand: xr.DataArray, - time_period: int = 1, + time_period: int, ): - """Iterates agent one turn. - - The goal is to figure out from market variables which technologies to invest in - and by how much. - """ - pass + """Increments agent to the next time point (e.g. performing investments).""" def __repr__(self): return ( @@ -124,7 +119,7 @@ def __init__( spend_limit: int = 0, **kwargs, ): - """Creates a standard buildings agent. + """Creates a standard agent. Arguments: name: Name of the agent, used for cross-refencing external tables @@ -166,9 +161,7 @@ def __init__( ) self.year = year - """ Current year. - - The year is incremented by one every time next is called. + """ Current year. Incremented by one every time next is called. """ self.forecast = forecast """Number of years to look into the future for forecating purposed.""" @@ -245,6 +238,15 @@ def asset_housekeeping(self): # state. 
self.assets = self._housekeeping(self, self.assets) + def next( + self, + technologies: xr.Dataset, + market: xr.Dataset, + demand: xr.DataArray, + time_period: int, + ): + self.year += time_period + def compute_decision( self, technologies: xr.Dataset, @@ -279,73 +281,6 @@ def compute_decision( decision = self.decision(objectives) return decision - def add_investments( - self, - technologies: xr.Dataset, - investments: xr.DataArray, - current_year: int, - time_period: int, - ): - """Add new assets to the agent.""" - new_capacity = self.retirement_profile( - technologies, investments, current_year, time_period - ) - - if new_capacity is None: - return - new_capacity = new_capacity.drop_vars( - set(new_capacity.coords) - set(self.assets.coords) - ) - new_assets = xr.Dataset(dict(capacity=new_capacity)) - - self.assets = self.merge_transform(self.assets, new_assets) - - def retirement_profile( - self, - technologies: xr.Dataset, - investments: xr.DataArray, - current_year: int, - time_period: int, - ) -> Optional[xr.DataArray]: - from muse.investments import cliff_retirement_profile - - if "asset" in investments.dims: - investments = investments.sum("asset") - if "agent" in investments.dims: - investments = investments.squeeze("agent", drop=True) - investments = investments.sel( - replacement=(investments > self.asset_threshold).any( - [d for d in investments.dims if d != "replacement"] - ) - ) - if investments.size == 0: - return None - - # figures out the retirement profile for the new investments - lifetime = self.filter_input( - technologies.technical_life, - year=current_year, - technology=investments.replacement, - ) - profile = cliff_retirement_profile( - lifetime.clip(min=time_period), - current_year=current_year + time_period, - protected=max(self.forecast - time_period - 1, 0), - ) - if "dst_region" in investments.coords: - investments = investments.reindex_like(profile, method="ffill") - - new_assets = (investments * profile).rename(replacement="asset") - 
- new_assets["installed"] = "asset", [current_year] * len(new_assets.asset) - - # The new assets have picked up quite a few coordinates along the way. - # we try and keep only those that were there originally. - if set(new_assets.dims) != set(self.assets.dims): - new, old = new_assets.dims, self.assets.dims - raise RuntimeError(f"Asset dimensions do not match: {new} vs {old}") - return new_assets - class InvestingAgent(Agent): """Agent that performs investment for itself.""" @@ -357,7 +292,7 @@ def __init__( investment: Optional[Callable] = None, **kwargs, ): - """Creates a standard buildings agent. + """Creates an investing agent. Arguments: *args: See :py:class:`~muse.agents.agent.Agent` @@ -384,7 +319,7 @@ def next( technologies: xr.Dataset, market: xr.Dataset, demand: xr.DataArray, - time_period: int = 1, + time_period: int, ): """Iterates agent one turn. @@ -456,4 +391,75 @@ def next( time_period=time_period, ) + # Increment the year self.year += time_period + + def add_investments( + self, + technologies: xr.Dataset, + investments: xr.DataArray, + current_year: int, + time_period: int, + ): + """Add new assets to the agent.""" + new_capacity = self.retirement_profile( + technologies, investments, current_year, time_period + ) + + if new_capacity is None: + return + new_capacity = new_capacity.drop_vars( + set(new_capacity.coords) - set(self.assets.coords) + ) + new_assets = xr.Dataset(dict(capacity=new_capacity)) + + self.assets = self.merge_transform(self.assets, new_assets) + + def retirement_profile( + self, + technologies: xr.Dataset, + investments: xr.DataArray, + current_year: int, + time_period: int, + ) -> Optional[xr.DataArray]: + from muse.investments import cliff_retirement_profile + + # Sum investments + if "asset" in investments.dims: + investments = investments.sum("asset") + if "agent" in investments.dims: + investments = investments.squeeze("agent", drop=True) + + # Filter out investments below the threshold + investments = investments.sel( + 
replacement=(investments > self.asset_threshold).any( + [d for d in investments.dims if d != "replacement"] + ) + ) + if investments.size == 0: + return None + + # Calculate the retirement profile for new investments + # Note: technical life must be at least the length of the time period + lifetime = self.filter_input( + technologies.technical_life, + year=current_year, + technology=investments.replacement, + ).clip(min=time_period) + profile = cliff_retirement_profile( + lifetime, + investment_year=current_year + time_period, + ) + if "dst_region" in investments.coords: + investments = investments.reindex_like(profile, method="ffill") + + # Apply the retirement profile to the investments + new_assets = (investments * profile).rename(replacement="asset") + new_assets["installed"] = "asset", [current_year] * len(new_assets.asset) + + # The new assets have picked up quite a few coordinates along the way. + # we try and keep only those that were there originally. + if set(new_assets.dims) != set(self.assets.dims): + new, old = new_assets.dims, self.assets.dims + raise RuntimeError(f"Asset dimensions do not match: {new} vs {old}") + return new_assets diff --git a/src/muse/investments.py b/src/muse/investments.py index 7d92e5c7b..08a000b19 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -175,8 +175,7 @@ def compute_investment( def cliff_retirement_profile( technical_life: xr.DataArray, - current_year: int = 0, - protected: int = 0, + investment_year: int, interpolation: str = "linear", **kwargs, ) -> xr.DataArray: @@ -186,19 +185,13 @@ def cliff_retirement_profile( Assets with a technical life smaller than the input time-period should automatically be renewed. - Hence, if ``technical_life <= protected``, then effectively, the technical life is - rewritten as ``technical_life * n`` with ``n = int(protected // technical_life) + - 1``. - We could just return an array where each year is represented. 
Instead, to save memory, we return a compact view of the same where years where no change happens are removed. Arguments: technical_life: lifetimes for each technology - current_year: current year - protected: The technologies are assumed to be renewed between years - `current_year` and `current_year + protected` + investment_year: The year in which the investment is made interpolation: Interpolation type **kwargs: arguments by which to filter technical_life, if any. @@ -211,26 +204,26 @@ def cliff_retirement_profile( if kwargs: technical_life = technical_life.sel(**kwargs) if "year" in technical_life.dims: - technical_life = technical_life.interp(year=current_year, method=interpolation) - technical_life = (1 + protected // technical_life) * technical_life # type:ignore + technical_life = technical_life.interp( + year=investment_year, method=interpolation + ) + # Create profile across all years if len(technical_life) > 0: - max_year = int(current_year + technical_life.max()) + max_year = int(investment_year + technical_life.max()) else: - max_year = int(current_year + protected) + max_year = investment_year allyears = xr.DataArray( - range(current_year, max_year + 1), + range(investment_year, max_year + 1), dims="year", - coords={"year": range(current_year, max_year + 1)}, + coords={"year": range(investment_year, max_year + 1)}, ) + profile = allyears < (investment_year + technical_life) # type: ignore - profile = allyears < (current_year + technical_life) # type: ignore - - # now we minimize the number of years needed to represent the profile fully - # this is done by removing the central year of any three repeating year, ensuring - # the removed year can be recovered by a linear interpolation. + # Minimize the number of years needed to represent the profile fully + # This is done by removing the central year of any three repeating years, ensuring + # the removed year can be recovered by linear interpolation. 
goodyears = avoid_repetitions(profile.astype(int)) - return profile.sel(year=goodyears).astype(bool) diff --git a/src/muse/sectors/subsector.py b/src/muse/sectors/subsector.py index d37045179..3beb7efa9 100644 --- a/src/muse/sectors/subsector.py +++ b/src/muse/sectors/subsector.py @@ -64,21 +64,8 @@ def invest( for agent in self.agents: agent.asset_housekeeping() - # Perform the investment + # Perform the investments self.aggregate_lp(technologies, market, time_period, current_year=current_year) - # if lp_problem is None: - # return - - # # If there is a problem with the LP... - # years = technologies.year - # techs = technologies.interp(year=years) - # techs = techs.sel(year=current_year + time_period) - - # solution = self.investment( - # search=lp_problem[0], technologies=techs, constraints=lp_problem[1] - # ) - - # self.assign_back_to_agents(technologies, solution, current_year, time_period) def assign_back_to_agents( self, @@ -133,32 +120,13 @@ def aggregate_lp( .swap_dims(dict(asset="technology")) ) - # agent_lps: MutableMapping[Hashable, xr.Dataset] = {} + # Increment each agent (perform investments) for agent in self.agents: if "agent" in demands.coords: share = demands.sel(asset=demands.agent == agent.uuid) else: share = demands - - # Compute investments for the agent agent.next(technologies, agent_market, share, time_period=time_period) - # if result is not None: - # agent_lps[agent.uuid] = result - - # if len(agent_lps) == 0: - # return None - - # lps = cast(xr.Dataset, agent_concatenation(agent_lps, dim="agent")) - # coords = {"agent", "technology", "region"}.intersection(assets.asset.coords) - # constraints = self.constraints( - # demand=demands, - # assets=reduce_assets(assets, coords=coords).set_coords(coords), - # search_space=lps.search_space, - # market=market, - # technologies=technologies, - # year=current_year, - # ) - # return lps, constraints @classmethod def factory( diff --git a/tests/test_investments.py b/tests/test_investments.py index 
92b9abbbc..415f0c9f5 100644 --- a/tests/test_investments.py +++ b/tests/test_investments.py @@ -75,7 +75,7 @@ def test_cliff_retirement_random_profile(protected): current = 5 profile = cliff_retirement_profile( - lifetime, current_year=current, protected=protected + lifetime, investment_year=current, protected=protected ) assert profile.year.min() == current assert profile.year.max() <= current + effective_lifetime.max() + 1 From a86e07f7f5f3cc93e9868587e5be4a2bd247ecfa Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 3 Oct 2024 14:56:04 +0100 Subject: [PATCH 04/92] Simplify merge_assets --- src/muse/agents/agent.py | 3 ++- src/muse/utilities.py | 50 +++++++++++++++------------------------- 2 files changed, 21 insertions(+), 32 deletions(-) diff --git a/src/muse/agents/agent.py b/src/muse/agents/agent.py index 8efdd607e..6a014e343 100644 --- a/src/muse/agents/agent.py +++ b/src/muse/agents/agent.py @@ -402,10 +402,10 @@ def add_investments( time_period: int, ): """Add new assets to the agent.""" + # Calculate retirement profile of new assets new_capacity = self.retirement_profile( technologies, investments, current_year, time_period ) - if new_capacity is None: return new_capacity = new_capacity.drop_vars( @@ -413,6 +413,7 @@ def add_investments( ) new_assets = xr.Dataset(dict(capacity=new_capacity)) + # Merge new assets with existing assets self.assets = self.merge_transform(self.assets, new_assets) def retirement_profile( diff --git a/src/muse/utilities.py b/src/muse/utilities.py index a2afd0093..6a8e4e09e 100644 --- a/src/muse/utilities.py +++ b/src/muse/utilities.py @@ -385,41 +385,29 @@ def merge_assets( dimension: str = "asset", ) -> xr.DataArray: """Merge two capacity arrays.""" + # Interpolate capacity arrays to a common time framework years = sorted(set(capa_a.year.values).union(capa_b.year.values)) - if len(capa_a.year) == 1: - result = xr.concat( - ( - capa_a, - capa_b.interp(year=years, method=interpolation).fillna(0), - ), - dim=dimension, - 
).fillna(0) + capa_a_interp = capa_a + capa_b_interp = capa_b.interp(year=years, method=interpolation).fillna(0) elif len(capa_b.year) == 1: - result = xr.concat( - ( - capa_a.interp(year=years, method=interpolation).fillna(0), - capa_b, - ), - dim=dimension, - ).fillna(0) + capa_a_interp = capa_a.interp(year=years, method=interpolation).fillna(0) + capa_b_interp = capa_b else: - result = xr.concat( - ( - capa_a.interp(year=years, method=interpolation).fillna(0), - capa_b.interp(year=years, method=interpolation).fillna(0), - ), - dim=dimension, - ) - forgroup = result.pipe(coords_to_multiindex, dimension=dimension) - if len(forgroup[dimension]) != len(set(forgroup[dimension].values)): - result = ( - forgroup.groupby(dimension) - .sum(dimension) - .clip(min=0) - .pipe(multiindex_to_coords, dimension=dimension) - ) - return result + capa_a_interp = capa_a.interp(year=years, method=interpolation).fillna(0) + capa_b_interp = capa_b.interp(year=years, method=interpolation).fillna(0) + + # Concatenate the two capacity arrays + result = xr.concat((capa_a_interp, capa_b_interp), dim=dimension) + + # forgroup = result.pipe(coords_to_multiindex, dimension=dimension) + # result = ( + # forgroup.groupby(dimension) + # .sum(dimension) + # .clip(min=0) + # .pipe(multiindex_to_coords, dimension=dimension) + # ) + return result.clip(min=0) def avoid_repetitions(data: xr.DataArray, dim: str = "year") -> xr.DataArray: From 3e513111cbc1c15ac2acefc0bd442034300d7735 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 4 Oct 2024 10:06:02 +0100 Subject: [PATCH 05/92] Revert change to merge_assets --- src/muse/utilities.py | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/src/muse/utilities.py b/src/muse/utilities.py index 6a8e4e09e..120949ee6 100644 --- a/src/muse/utilities.py +++ b/src/muse/utilities.py @@ -141,6 +141,8 @@ def reduce_assets( """ from copy import copy + assets = copy(assets) + if operation is None: def operation(x): @@ 
-148,23 +150,31 @@ def operation(x): assert operation is not None + # Concatenate assets if a sequence is given if not isinstance(assets, (xr.Dataset, xr.DataArray)): assets = xr.concat(assets, dim=dim) assert isinstance(assets, (xr.Dataset, xr.DataArray)) + + # If there are no assets, nothing needs to be done if assets[dim].size == 0: return assets + + # Coordinates to reduce over (e.g. technology, installed) if coords is None: coords = [cast(str, k) for k, v in assets.coords.items() if v.dims == (dim,)] elif isinstance(coords, str): coords = (coords,) coords = [k for k in coords if k in assets.coords and assets[k].dims == (dim,)] - assets = copy(assets) + + # Create a new dimension to group by dtypes = [(d, assets[d].dtype) for d in coords] grouper = np.array( list(zip(*(cast(Iterator, assets[d].values) for d in coords))), dtype=dtypes ) assert "grouper" not in assets.coords assets["grouper"] = "asset", grouper + + # Perform the operation result = operation(assets.groupby("grouper")).rename(grouper=dim) for i, d in enumerate(coords): result[d] = dim, [u[i] for u in result[dim].values] @@ -400,14 +410,16 @@ def merge_assets( # Concatenate the two capacity arrays result = xr.concat((capa_a_interp, capa_b_interp), dim=dimension) - # forgroup = result.pipe(coords_to_multiindex, dimension=dimension) - # result = ( - # forgroup.groupby(dimension) - # .sum(dimension) - # .clip(min=0) - # .pipe(multiindex_to_coords, dimension=dimension) - # ) - return result.clip(min=0) + # + forgroup = result.pipe(coords_to_multiindex, dimension=dimension) + if len(forgroup[dimension]) != len(set(forgroup[dimension].values)): + result = ( + forgroup.groupby(dimension) + .sum(dimension) + .clip(min=0) + .pipe(multiindex_to_coords, dimension=dimension) + ) + return result def avoid_repetitions(data: xr.DataArray, dim: str = "year") -> xr.DataArray: From 941a5a6824a162411bd1cdf0cc055649cf4fc3aa Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 4 Oct 2024 10:07:36 +0100 Subject: [PATCH 
06/92] Delete unused factory --- src/muse/agents/__init__.py | 2 +- src/muse/agents/factories.py | 91 ------------------------------------ 2 files changed, 1 insertion(+), 92 deletions(-) diff --git a/src/muse/agents/__init__.py b/src/muse/agents/__init__.py index 069f1c2ef..926d1ee14 100644 --- a/src/muse/agents/__init__.py +++ b/src/muse/agents/__init__.py @@ -8,4 +8,4 @@ ] from muse.agents.agent import AbstractAgent, Agent, InvestingAgent -from muse.agents.factories import agents_factory, create_agent, factory +from muse.agents.factories import agents_factory, create_agent diff --git a/src/muse/agents/factories.py b/src/muse/agents/factories.py index efd7056fd..236eb24eb 100644 --- a/src/muse/agents/factories.py +++ b/src/muse/agents/factories.py @@ -7,7 +7,6 @@ import xarray as xr from muse.agents.agent import Agent, InvestingAgent -from muse.defaults import DEFAULT_SECTORS_DIRECTORY from muse.errors import RetrofitAgentNotDefined, TechnologyNotDefined @@ -173,96 +172,6 @@ def create_agent(agent_type: str, **kwargs) -> Agent: return method(**kwargs) # type: ignore -def factory( - existing_capacity_path: Optional[Union[Path, str]] = None, - agent_parameters_path: Optional[Union[Path, str]] = None, - technodata_path: Optional[Union[Path, str]] = None, - technodata_timeslices_path: Optional[Union[str, Path]] = None, - sector: Optional[str] = None, - sectors_directory: Union[str, Path] = DEFAULT_SECTORS_DIRECTORY, - baseyear: int = 2010, -) -> list[Agent]: - """Reads list of agents from standard MUSE input files.""" - from copy import deepcopy - from logging import getLogger - from textwrap import dedent - - from muse.readers import ( - read_csv_agent_parameters, - read_initial_assets, - read_technodata_timeslices, - read_technodictionary, - ) - from muse.readers.csv import find_sectors_file - - if sector is None: - assert existing_capacity_path is not None - assert agent_parameters_path is not None - assert technodata_path is not None - - if existing_capacity_path 
is None: - existing_capacity_path = find_sectors_file( - f"Existing{sector}.csv", sector, sectors_directory - ) - if agent_parameters_path is None: - agent_parameters_path = find_sectors_file( - f"BuildingAgent{sector}.csv", sector, sectors_directory - ) - if technodata_path is None: - technodata_path = find_sectors_file( - f"technodata{sector}.csv", sector, sectors_directory - ) - - params = read_csv_agent_parameters(agent_parameters_path) - techno = read_technodictionary(technodata_path) - capa = read_initial_assets(existing_capacity_path) - if technodata_timeslices_path and isinstance( - technodata_timeslices_path, (str, Path) - ): - technodata_timeslices = read_technodata_timeslices(technodata_timeslices_path) - else: - technodata_timeslices = None - result = [] - for param in params: - if param["agent_type"] == "retrofit": - param["technologies"] = techno.sel(region=param["region"]) - if technodata_timeslices is not None: - param.drop_vars("utilization_factor") - param = param.merge(technodata_timeslices.sel(region=param["region"])) - param["category"] = param["agent_type"] - param["capacity"] = deepcopy(capa.sel(region=param["region"])) - param["year"] = baseyear - result.append(create_agent(**param)) - - nregs = len({u.region for u in result}) - types = [u.name for u in result] - msg = dedent( - """\ - Read agents for sector {name} from: - - agent parameter file {para} - - technologies data file {tech} - - initial capacity file {ini} - - Found {n} agents across {nregs} regions{end} - """.format( - n=len(result), - name=sector, - para=agent_parameters_path, - tech=technodata_path, - ini=existing_capacity_path, - nregs=nregs, - end="." 
if len(result) == 0 else ", with:\n", - ) - ) - for t in set(types): - n = types.count(t) - msg += " - {n} {t} agent{plural}\n".format( - n=n, t=t, plural="" if n == 1 else "s" - ) - getLogger(__name__).info(msg) - return result - - def agents_factory( params_or_path: Union[str, Path, list], capacity: Union[xr.DataArray, str, Path], From 9d2cac9c4be1729193e2483373ff5a35f83ce974 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 4 Oct 2024 10:08:01 +0100 Subject: [PATCH 07/92] More comments added to code --- src/muse/demand_share.py | 93 +++++++++++++++++++++++++--------------- src/muse/investments.py | 14 +++++- 2 files changed, 70 insertions(+), 37 deletions(-) diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index f95e43f30..f913c5fbc 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -363,8 +363,15 @@ def decommissioning(capacity): if current_year is None: current_year = market.year.min() + # Make sure there are no retrofit agents + for agent in agents: + if agent.category == "retrofit": + raise RetrofitAgentInStandardDemandShare() + + # Calculate existing capacity capacity = reduce_assets([agent.assets.capacity for agent in agents]) + # Calculate new and retrofit demands demands = new_and_retro_demands( capacity, market, @@ -374,34 +381,32 @@ def decommissioning(capacity): forecast=forecast, ) + # Only consider end-use commodities demands = demands.where( is_enduse(technologies.comm_usage.sel(commodity=demands.commodity)), 0 ) - for agent in agents: - if agent.category == "retrofit": - raise RetrofitAgentInStandardDemandShare() - id_to_share: MutableMapping[Hashable, xr.DataArray] = {} for region in demands.region.values: + # Calculate current capacity current_capacity: MutableMapping[Hashable, xr.DataArray] = { agent.uuid: agent.assets.capacity for agent in agents if agent.region == region } + + # Split demands between agents id_to_quantity = { agent.uuid: (agent.name, agent.region, agent.quantity) for agent in agents if 
agent.region == region } - retro_demands: MutableMapping[Hashable, xr.DataArray] = _inner_split( current_capacity, demands.retrofit.sel(region=region), decommissioning, id_to_quantity, ) - new_demands = _inner_split( current_capacity, demands.new.sel(region=region), @@ -413,6 +418,7 @@ def decommissioning(capacity): id_to_quantity, ) + # Sum new and retrofit demands total_demands = { k: new_demands[k] + retro_demands[k] for k in new_demands.keys() } @@ -527,14 +533,21 @@ def unmet_demand( prod_method = production if callable(production) else prod_factory(production) assert callable(prod_method) + + # Calculate production by existing assets produced = prod_method(market=market, capacity=capacity, technologies=technologies) + + # Total commodity production by summing over assets if "dst_region" in produced.dims: produced = produced.sum("asset").rename(dst_region="region") elif "region" in produced.coords and produced.region.dims: produced = produced.groupby("region").sum("asset") else: produced = produced.sum("asset") - return (market.consumption - produced).clip(min=0) + + # Unmet demand is the difference between the consumption and the production + unmet_demand = (market.consumption - produced).clip(min=0) + return unmet_demand def new_consumption( @@ -565,20 +578,23 @@ def new_consumption( if current_year is None: current_year = market.year.min() - ts_capa = convert_timeslice( - capacity.interp(year=current_year), market.timeslice, QuantityType.EXTENSIVE - ) + # Interpolate market to forecast year + market = market.interp(year=[current_year, current_year + forecast]) + current = market.sel(year=current_year, drop=True) + forecasted = market.sel(year=current_year + forecast, drop=True) + + # Calculate the increase in consumption over the forecast period + delta = (forecasted.consumption - current.consumption).clip(min=0) + + # Capacity in the forecast year ts_capa = convert_timeslice( capacity.interp(year=current_year + forecast), market.timeslice, 
QuantityType.EXTENSIVE, ) assert isinstance(ts_capa, xr.DataArray) - market = market.interp(year=[current_year, current_year + forecast]) - current = market.sel(year=current_year, drop=True) - forecasted = market.sel(year=current_year + forecast, drop=True) - delta = (forecasted.consumption - current.consumption).clip(min=0) + # missing = unmet_demand(current, ts_capa, technologies) consumption = minimum(delta, missing) return consumption @@ -612,23 +628,28 @@ def new_and_retro_demands( if current_year is None: current_year = market.year.min() + # Interpolate market to forecast year smarket: xr.Dataset = market.interp(year=[current_year, current_year + forecast]) + + # Split capacity between timeslices ts_capa = convert_timeslice( capacity.interp(year=[current_year, current_year + forecast]), market.timeslice, QuantityType.EXTENSIVE, ) - assert isinstance(ts_capa, xr.DataArray) + if hasattr(ts_capa, "region") and ts_capa.region.dims == (): ts_capa["region"] = "asset", [str(ts_capa.region.values)] * len(ts_capa.asset) + # Calculate demand to allocate to "new" agents new_demand = new_consumption( ts_capa, smarket, technologies, current_year=current_year, forecast=forecast ) if "year" in new_demand.dims: new_demand = new_demand.squeeze("year") + # Total production in the forecast year by existing assets service = ( production_method( smarket.sel(year=current_year + forecast), @@ -638,37 +659,39 @@ def new_and_retro_demands( .groupby("region") .sum("asset") ) - # existing asset should not execute beyond demand + + # Existing asset should not execute beyond demand service = minimum( service, smarket.consumption.sel(year=current_year + forecast, drop=True) ) + + # Leftover demand that cannot be serviced by existing assets or "new" agents retro_demand = ( smarket.consumption.sel(year=current_year + forecast, drop=True) - new_demand - service ).clip(min=0) - if "year" in retro_demand.dims: retro_demand = retro_demand.squeeze("year") return xr.Dataset({"new": new_demand, 
"retrofit": retro_demand}) -def new_demand( - capacity: xr.DataArray, - market: xr.Dataset, - technologies: xr.Dataset, - production: Union[str, Mapping, Callable] = "maximum_production", - current_year: Optional[int] = None, - forecast: int = 5, -) -> xr.DataArray: - """Calculates the new demand that needs to be covered. - - It groups the demand related to an increase in consumption as well as the existing - demand associated with decommissoned assets. Internally, it just calls - `new_and_retro` demands and adds together both components. - """ - demand = new_and_retro_demands( - capacity, market, technologies, production, current_year, forecast - ) - return (demand["new"] + demand["retrofit"]).rename("demand") +# def new_demand( +# capacity: xr.DataArray, +# market: xr.Dataset, +# technologies: xr.Dataset, +# production: Union[str, Mapping, Callable] = "maximum_production", +# current_year: Optional[int] = None, +# forecast: int = 5, +# ) -> xr.DataArray: +# """Calculates the new demand that needs to be covered. + +# It groups the demand related to an increase in consumption as well as the existing +# demand associated with decommissoned assets. Internally, it just calls +# `new_and_retro` demands and adds together both components. 
+# """ +# demand = new_and_retro_demands( +# capacity, market, technologies, production, current_year, forecast +# ) +# return (demand["new"] + demand["retrofit"]).rename("demand") diff --git a/src/muse/investments.py b/src/muse/investments.py index 08a000b19..8d8cc7075 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -154,6 +154,7 @@ def compute_investment( """ from numpy import zeros + # Skip the investment step if no assets or replacements are available if any(u == 0 for u in search.decision.shape): return xr.DataArray( zeros((len(search.asset), len(search.replacement))), @@ -161,6 +162,7 @@ def compute_investment( dims=("asset", "replacement"), ) + # Otherwise, compute the investment return investment( search.decision, search.search_space, @@ -305,18 +307,24 @@ def scipy_match_demand( if "timeslice" in costs.dims and timeslice_op is not None: costs = timeslice_op(costs) + + timeslice = next(cs.timeslice for cs in constraints if "timeslice" in cs.dims) + + # Select technodata for the current year if "year" in technologies.dims and year is None: raise ValueError("Missing year argument") elif "year" in technologies.dims: techs = technologies.sel(year=year).drop_vars("year") else: techs = technologies - timeslice = next(cs.timeslice for cs in constraints if "timeslice" in cs.dims) + # Run scipy optimization with highs solver adapter = ScipyAdapter.factory( techs, cast(np.ndarray, costs), timeslice, *constraints ) res = linprog(**adapter.kwargs, method="highs") + + # Backup: try with highs-ipm if not res.success and (res.status != 0): res = linprog( **adapter.kwargs, @@ -338,7 +346,9 @@ def scipy_match_demand( getLogger(__name__).critical(msg) raise GrowthOfCapacityTooConstrained - return cast(Callable[[np.ndarray], xr.Dataset], adapter.to_muse)(res.x) + # Convert results to a MUSE friendly format + result = cast(Callable[[np.ndarray], xr.Dataset], adapter.to_muse)(res.x) + return result @register_investment(name=["cvxopt"]) From 
e386da12460a0c11a249ec0d3a3571d0af8544d0 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 4 Oct 2024 12:10:10 +0100 Subject: [PATCH 08/92] Revert some changes to fix tests --- src/muse/demand_share.py | 9 +++++---- src/muse/investments.py | 2 +- src/muse/quantities.py | 8 +++++--- src/muse/sectors/sector.py | 19 ++++++++++++++++++- tests/test_agents.py | 2 +- tests/test_investments.py | 14 +++++++------- tests/test_subsector.py | 11 +++++++++-- 7 files changed, 46 insertions(+), 19 deletions(-) diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index f913c5fbc..01af32149 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -473,6 +473,7 @@ def _inner_split( """ from numpy import logical_and + # Find decrease in capacity production by each asset over time shares: Mapping[Hashable, xr.DataArray] = { key: method(capacity=capacity) .groupby("technology") @@ -480,24 +481,24 @@ def _inner_split( .rename(technology="asset") for key, capacity in assets.items() } + + # Total decrease in production across assets try: summed_shares: xr.DataArray = xr.concat(shares.values(), dim="concat_dim").sum( "concat_dim" ) - - # Calculates the total demand assigned in the previous step with the "method" - # function across agents and assets. total: xr.DataArray = summed_shares.sum("asset") except AttributeError: raise AgentWithNoAssetsInDemandShare() # Calculates the demand divided by the number of assets times the number of agents # if the demand is bigger than zero and the total demand assigned with the "method" - # function is zero. + # function is zero (i.e. no decrease in production). unassigned = (demand / (len(shares) * len(summed_shares))).where( logical_and(demand > 1e-12, total <= 1e-12), 0 ) + # ??? 
totals = { key: (share / share.sum("asset")).fillna(0) for key, share in shares.items() } diff --git a/src/muse/investments.py b/src/muse/investments.py index 8d8cc7075..19598f496 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -177,7 +177,7 @@ def compute_investment( def cliff_retirement_profile( technical_life: xr.DataArray, - investment_year: int, + investment_year: int = 0, interpolation: str = "linear", **kwargs, ) -> xr.DataArray: diff --git a/src/muse/quantities.py b/src/muse/quantities.py index 7daf87b15..66df60c39 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -250,9 +250,11 @@ def decommissioning_demand( baseyear = min(year) dyears = [u for u in year if u != baseyear] - return maximum_production( - technologies, capacity.sel(year=baseyear) - capacity.sel(year=dyears) - ).clip(min=0) + # Calculate the decrease in capacity from the current year to future years + capacity_decrease = capacity.sel(year=baseyear) - capacity.sel(year=dyears) + + # Calculate production associated with this capacity + return maximum_production(technologies, capacity_decrease).clip(min=0) def consumption( diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index 1dc42dd52..31e04dae5 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -26,6 +26,7 @@ def factory(cls, name: str, settings: Any) -> Sector: from muse.interactions import factory as interaction_factory from muse.outputs.sector import factory as ofactory from muse.production import factory as pfactory + from muse.readers import read_timeslices from muse.readers.toml import read_technodata from muse.utilities import nametuple_to_dict @@ -38,6 +39,11 @@ def factory(cls, name: str, settings: Any) -> Sector: if len(sector_settings["subsectors"]._asdict()) == 0: raise RuntimeError(f"Empty 'subsectors' section in sector {name}") + # Timeslices + timeslices = read_timeslices( + sector_settings.pop("timeslice_levels", None) + 
).get_index("timeslice") + # Read technologies technologies = read_technodata(settings, name, settings.time_framework) @@ -79,12 +85,18 @@ def factory(cls, name: str, settings: Any) -> Sector: interactions = interaction_factory(sector_settings.pop("interactions", None)) # Create sector - for attr in ("technodata", "commodities_out", "commodities_in"): + for attr in ( + "technodata", + "commodities_out", + "commodities_in", + "technodata_timeslices", + ): sector_settings.pop(attr, None) return cls( name, technologies, subsectors=subsectors, + timeslices=timeslices, supply_prod=supply, outputs=outputs, interactions=interactions, @@ -96,6 +108,7 @@ def __init__( name: str, technologies: xr.Dataset, subsectors: Sequence[Subsector] = [], + timeslices: pd.MultiIndex | None = None, interactions: Callable[[Sequence[AbstractAgent]], None] | None = None, interpolation: str = "linear", outputs: Callable | None = None, @@ -111,6 +124,10 @@ def __init__( """Subsectors controlled by this object.""" self.technologies: xr.Dataset = technologies """Parameters describing the sector's technologies.""" + self.timeslices: pd.MultiIndex | None = timeslices + """Timeslice at which this sector operates. + If None, it will operate using the timeslice of the input market. 
+ """ self.interpolation: Mapping[str, Any] = { "method": interpolation, "kwargs": {"fill_value": "extrapolate"}, diff --git a/tests/test_agents.py b/tests/test_agents.py index c767c2c98..3287e0e3c 100644 --- a/tests/test_agents.py +++ b/tests/test_agents.py @@ -143,7 +143,7 @@ def test_run_retro_agent(retro_agent, technologies, agent_market, demand_share): technologies.max_capacity_addition[:] = retro_agent.assets.capacity.sum() * 100 technologies.max_capacity_growth[:] = retro_agent.assets.capacity.sum() * 100 - retro_agent.next(technologies, agent_market, demand_share) + retro_agent.next(technologies, agent_market, demand_share, time_period=5) def test_merge_assets(assets): diff --git a/tests/test_investments.py b/tests/test_investments.py index 415f0c9f5..f330eda43 100644 --- a/tests/test_investments.py +++ b/tests/test_investments.py @@ -44,7 +44,7 @@ def test_cliff_retirement_known_profile(): name="technical_life", ) - profile = cliff_retirement_profile(lifetime) + profile = cliff_retirement_profile(technical_life=lifetime) expected = array( [ [True, False, False, False], @@ -73,12 +73,12 @@ def test_cliff_retirement_random_profile(protected): ) effective_lifetime = (protected // lifetime + 1) * lifetime - current = 5 + investment_year = 5 profile = cliff_retirement_profile( - lifetime, investment_year=current, protected=protected + technical_life=lifetime, investment_year=investment_year, protected=protected ) - assert profile.year.min() == current - assert profile.year.max() <= current + effective_lifetime.max() + 1 - assert profile.astype(int).interp(year=current).all() - assert profile.astype(int).interp(year=current + protected).all() + assert profile.year.min() == investment_year + assert profile.year.max() <= investment_year + effective_lifetime.max() + 1 + assert profile.astype(int).interp(year=investment_year).all() + assert profile.astype(int).interp(year=investment_year + protected).all() assert not 
profile.astype(int).interp(year=profile.year.max()).any() diff --git a/tests/test_subsector.py b/tests/test_subsector.py index 9c326f1f4..3ec688c26 100644 --- a/tests/test_subsector.py +++ b/tests/test_subsector.py @@ -48,7 +48,12 @@ def test_subsector_investing_aggregation(): subsector = Subsector(agents, commodities) initial_agents = deepcopy(agents) assert {agent.year for agent in agents} == {int(market.year.min())} - assert subsector.aggregate_lp(technologies, market) is None + assert ( + subsector.aggregate_lp( + technologies, market, time_period=5, current_year=5 + ) + is None + ) assert {agent.year for agent in agents} == {int(market.year.min() + 5)} for initial, final in zip(initial_agents, agents): assert initial.assets.sum() != final.assets.sum() @@ -105,7 +110,9 @@ def test_subsector_noninvesting_aggregation(market, model, technologies, tmp_pat commodity=technologies.commodity, region=technologies.region ).interp(year=[2020, 2025]) assert all(agent.year == 2020 for agent in agents) - result = subsector.aggregate_lp(technologies, market) + result = subsector.aggregate_lp( + technologies, market, time_period=5, current_year=2020 + ) assert result is not None assert len(result) == 2 From 018ca5c3bb8c32dc438a3edf4802de4a057165e5 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 4 Oct 2024 12:32:59 +0100 Subject: [PATCH 09/92] Fix tests --- tests/test_investments.py | 4 ++-- tests/test_subsector.py | 18 +----------------- 2 files changed, 3 insertions(+), 19 deletions(-) diff --git a/tests/test_investments.py b/tests/test_investments.py index f330eda43..a4fc87690 100644 --- a/tests/test_investments.py +++ b/tests/test_investments.py @@ -75,10 +75,10 @@ def test_cliff_retirement_random_profile(protected): investment_year = 5 profile = cliff_retirement_profile( - technical_life=lifetime, investment_year=investment_year, protected=protected + technical_life=lifetime.clip(min=protected), investment_year=investment_year ) assert profile.year.min() == 
investment_year assert profile.year.max() <= investment_year + effective_lifetime.max() + 1 assert profile.astype(int).interp(year=investment_year).all() - assert profile.astype(int).interp(year=investment_year + protected).all() + assert profile.astype(int).interp(year=investment_year + protected - 1).all() assert not profile.astype(int).interp(year=profile.year.max()).any() diff --git a/tests/test_subsector.py b/tests/test_subsector.py index 3ec688c26..5d18358a9 100644 --- a/tests/test_subsector.py +++ b/tests/test_subsector.py @@ -1,4 +1,3 @@ -from collections.abc import Sequence from unittest.mock import MagicMock, patch import xarray as xr @@ -110,22 +109,7 @@ def test_subsector_noninvesting_aggregation(market, model, technologies, tmp_pat commodity=technologies.commodity, region=technologies.region ).interp(year=[2020, 2025]) assert all(agent.year == 2020 for agent in agents) - result = subsector.aggregate_lp( - technologies, market, time_period=5, current_year=2020 - ) - - assert result is not None - assert len(result) == 2 - - lpcosts, lpconstraints = result - assert isinstance(lpcosts, xr.Dataset) - assert {"search_space", "decision"} == set(lpcosts.data_vars) - assert "agent" in lpcosts.coords - assert isinstance(lpconstraints, Sequence) - assert len(lpconstraints) == 1 - assert all(isinstance(u, xr.Dataset) for u in lpconstraints) - # makes sure agent investment got called - assert all(agent.year == 2025 for agent in agents) + subsector.aggregate_lp(technologies, market, time_period=5, current_year=2020) def test_factory_smoke_test(model, technologies, tmp_path): From 4fd2e79a486e026f31412f8362fc7e54f3ac8535 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 4 Oct 2024 14:02:07 +0100 Subject: [PATCH 10/92] Small fix to another test --- src/muse/agents/agent.py | 3 +-- src/muse/demand_share.py | 20 -------------------- tests/test_utilities.py | 2 +- 3 files changed, 2 insertions(+), 23 deletions(-) diff --git a/src/muse/agents/agent.py 
b/src/muse/agents/agent.py index 6a014e343..836adbab6 100644 --- a/src/muse/agents/agent.py +++ b/src/muse/agents/agent.py @@ -161,8 +161,7 @@ def __init__( ) self.year = year - """ Current year. Incremented by one every time next is called. - """ + """ Current year. Incremented by one every time next is called.""" self.forecast = forecast """Number of years to look into the future for forecating purposed.""" if search_rules is None: diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index 01af32149..a5187c4fe 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -676,23 +676,3 @@ def new_and_retro_demands( retro_demand = retro_demand.squeeze("year") return xr.Dataset({"new": new_demand, "retrofit": retro_demand}) - - -# def new_demand( -# capacity: xr.DataArray, -# market: xr.Dataset, -# technologies: xr.Dataset, -# production: Union[str, Mapping, Callable] = "maximum_production", -# current_year: Optional[int] = None, -# forecast: int = 5, -# ) -> xr.DataArray: -# """Calculates the new demand that needs to be covered. - -# It groups the demand related to an increase in consumption as well as the existing -# demand associated with decommissoned assets. Internally, it just calls -# `new_and_retro` demands and adds together both components. 
-# """ -# demand = new_and_retro_demands( -# capacity, market, technologies, production, current_year, forecast -# ) -# return (demand["new"] + demand["retrofit"]).rename("demand") diff --git a/tests/test_utilities.py b/tests/test_utilities.py index f829ac6be..44d7bf0f6 100644 --- a/tests/test_utilities.py +++ b/tests/test_utilities.py @@ -37,7 +37,7 @@ def test_reduce_assets_with_zero_size(capacity: xr.DataArray): x = capacity.sel(asset=[]) actual = reduce_assets(x) - assert actual is x + assert (actual == x).all() def test_broadcast_tech(technologies, capacity): From 51a5273f9720f95fa4126b6654e11414a490c616 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 4 Oct 2024 14:58:40 +0100 Subject: [PATCH 11/92] Delete legacy sector --- src/muse/mca.py | 68 +---- src/muse/sectors/legacy_sector.py | 451 ------------------------------ 2 files changed, 1 insertion(+), 518 deletions(-) delete mode 100644 src/muse/sectors/legacy_sector.py diff --git a/src/muse/mca.py b/src/muse/mca.py index 7fa2cd481..1440a7d76 100644 --- a/src/muse/mca.py +++ b/src/muse/mca.py @@ -274,23 +274,14 @@ def run(self) -> None: """ from logging import getLogger - from numpy import where from xarray import DataArray - _, self.sectors, hist_years = self.calibrate_legacy_sectors() - if len(hist_years) > 0: - hist = where(self.time_framework <= hist_years[-1])[0] - start = hist[-1] - - else: - start = -1 - nyear = len(self.time_framework) - 1 check_carbon_budget = len(self.carbon_budget) and len(self.carbon_commodities) shoots = self.control_undershoot or self.control_overshoot variables = ["supply", "consumption", "prices"] - for year_idx in range(start + 1, nyear): + for year_idx in range(nyear): years = self.time_framework[year_idx : year_idx + 2] getLogger(__name__).info(f"Running simulation year {years[0]}...") new_market = self.market[variables].sel(year=years) @@ -342,63 +333,6 @@ def run(self) -> None: f"Finish simulation year {years[0]} ({year_idx+1}/{nyear})!" 
) - def calibrate_legacy_sectors(self): - """Run a calibration step in the legacy sectors. - - Run historical years. - """ - from copy import deepcopy - from logging import getLogger - - from numpy import where - - hist_years = [] - if len([s for s in self.sectors if "LegacySector" in str(type(s))]) == 0: - return None, self.sectors, hist_years - - sectors = [] - idx = [] - for i, s in enumerate(self.sectors): - if "LegacySector" in str(type(s)): - s.mode = "Calibration" - sectors.append(s) - idx.append(i) - - getLogger(__name__).info("Calibrating LegacySectors...") - - if 2015 in self.time_framework: - hist_years = self.time_framework[where(self.time_framework <= 2015)] - hist = len(hist_years) - for year_idx in range(hist): # range(nyear): - years = self.time_framework[year_idx : year_idx + 1] - sectors = deepcopy(sectors) - variables = ["supply", "consumption", "prices"] - new_market = self.market[variables].sel(year=years).copy(deep=True) - for sector in sectors: - sector_market = sector.next( - new_market[["supply", "consumption", "prices"]] # type:ignore - ) - - sector_market = sector_market.sel(year=new_market.year) - - dims = {i: sector_market[i] for i in sector_market.consumption.dims} - - sector_market.consumption.loc[dims] = ( - sector_market.consumption.loc[dims] - sector_market.supply.loc[dims] - ).clip(min=0.0, max=None) - new_market.consumption.loc[dims] += sector_market.consumption - - dims = {i: sector_market[i] for i in sector_market.supply.dims} - new_market.supply.loc[dims] += sector_market.supply - - for i, s in enumerate(sectors): - s.mode = "Iteration" - self.sectors[idx[i]] = s - - getLogger(__name__).info("Finish calibration of LegacySectors!") - - return None, self.sectors, hist_years - class SingleYearIterationResult(NamedTuple): """Result of iterating over sectors for a year. 
diff --git a/src/muse/sectors/legacy_sector.py b/src/muse/sectors/legacy_sector.py deleted file mode 100644 index ad61cdc6f..000000000 --- a/src/muse/sectors/legacy_sector.py +++ /dev/null @@ -1,451 +0,0 @@ -"""This module defines the LegacySector class. - -This is needed to interface the new MCA with the old MUSE sectors. It can be deleted -once accessing those sectors is no longer needed. -""" - -from collections.abc import Sequence -from dataclasses import dataclass -from itertools import chain -from logging import getLogger -from typing import Any, Union - -import numpy as np -import pandas as pd -from xarray import DataArray, Dataset - -from muse.readers import read_csv_timeslices, read_initial_market -from muse.sectors.abstract import AbstractSector -from muse.sectors.register import register_sector -from muse.timeslices import QuantityType, new_to_old_timeslice - - -@dataclass -class LegacyMarket: - BaseYear: int - EndYear: int - Foresight: np.ndarray - TimeFramework: np.ndarray - YearlyTimeFramework: np.ndarray - NYears: list - GlobalCommoditiesAttributes: np.ndarray - CommoditiesBudget: list - macro_drivers: pd.DataFrame - dfRegions: pd.DataFrame - Regions: np.ndarray - interpolation_mode: str - - -@register_sector(name="legacy") -class LegacySector(AbstractSector): # type: ignore - @classmethod - def factory(cls, name: str, settings: Any, **kwargs) -> "LegacySector": - from pathlib import Path - - from muse_legacy.sectors import SECTORS - - from muse.readers import read_technologies - - sector = getattr(settings.sectors, name) - - settings_dir = sector.userdata_path - sectors_dir = Path(sector.technodata_path).parent - excess = sector.excess - - base_year = settings.time_framework[0] - end_year = settings.time_framework[-1] - - path = settings.global_input_files.macrodrivers - macro_drivers = pd.read_csv(path).sort_index(ascending=True) - - path = settings.global_input_files.regions - regions = pd.read_csv(path).sort_index(ascending=True) - 
global_commodities = read_technologies( - Path(sector.technodata_path) / f"technodata{name.title()}.csv", - None, - Path(sector.technodata_path) / f"commOUTtechnodata{name.title()}.csv", - Path(sector.technodata_path) / f"commINtechnodata{name.title()}.csv", - commodities=settings.global_input_files.global_commodities, - )[["heat_rate", "unit", "emmission_factor"]] - - interpolation_mode = ( - "Active" if settings.interpolation_mode == "linear" else "off" - ) - - market = LegacyMarket( - BaseYear=base_year, - EndYear=end_year, - Foresight=np.array([settings.foresight]), - TimeFramework=settings.time_framework, - YearlyTimeFramework=np.arange(base_year, end_year + 1, 1, dtype=int), - NYears=list(np.diff(settings.time_framework)), - GlobalCommoditiesAttributes=global_commodities.commodity.values, - CommoditiesBudget=settings.carbon_budget_control.commodities, - macro_drivers=macro_drivers, - dfRegions=regions, - Regions=np.array(settings.regions), - interpolation_mode=interpolation_mode, - ) - - timeslices, aggregation = cls.load_timeslices_and_aggregation( - settings.timeslices, settings.sectors - ) - timeslices = { - "prices": timeslices["prices"], - "finest": timeslices["finest"], - "finest aggregation": aggregation, - name: timeslices[name], - } - - initial = ( - read_initial_market( - settings.global_input_files.projections, - base_year_export=getattr( - settings.global_input_files, "base_year_export", None - ), - base_year_import=getattr( - settings.global_input_files, "base_year_import", None - ), - timeslices=timeslices["prices"], - ) - .sel(region=settings.regions) - .interp(year=settings.time_framework, method=settings.interpolation_mode) - ) - commodity_price = initial["prices"] - static_trade = initial["static_trade"] - - old_sector = SECTORS[name]( - market=market, sectors_dir=sectors_dir, settings_dir=settings_dir - ) - - old_sector.SectorCommoditiesOUT = commodities_idx(old_sector, "OUT") - old_sector.SectorCommoditiesIN = commodities_idx(old_sector, 
"IN") - old_sector.SectorCommoditiesNotENV = commodities_idx(old_sector, "NotENV") - - sector_comm = list( - set(old_sector.SectorCommoditiesOUT).union(old_sector.SectorCommoditiesIN) - ) - - commodities = { - "global": global_commodities, - name: global_commodities.isel(commodity=sector_comm), - } - - msg = f"LegacySector {name} created successfully." - getLogger(__name__).info(msg) - return cls( - name, - old_sector, - timeslices, - commodities, - commodity_price, - static_trade, - settings.regions, - settings.time_framework, - "Calibration" if getattr(settings, "calibration", False) else "Iteration", - excess, - "converged", - str(sectors_dir), - str(sector.output_path), - ) - - def __init__( - self, - name: str, - old_sector, - timeslices: dict, - commodities: dict, - commodity_price: DataArray, - static_trade: DataArray, - regions: Sequence, - time_framework: np.ndarray, - mode: str, - excess: Union[int, float], - market_iterative: str, - sectors_dir: str, - output_dir: str, - ): - super().__init__() - self.name = name - """Name of the sector""" - self.old_sector = old_sector - """Legacy sector method to run the calculation""" - assert "prices" in timeslices - assert "finest" in timeslices - assert name in timeslices - self.timeslices = timeslices - """Timeslices for sectors and mca.""" - self.commodities = commodities - """Commodities for each sector, as well as global commodities.""" - self.commodity_price = commodity_price - """Initial price of all the commodities.""" - self.static_trade = static_trade - """Static trade needed for the conversion and supply sectors.""" - self.regions = regions - """Regions taking part in the simulation.""" - self.time_framework = time_framework - """Time framework of the complete simulation.""" - self.mode = mode - """If 'Calibration', the sector runs in calibration mode""" - self.excess = excess - """Allowed excess of capacity.""" - self.market_iterative = market_iterative - """ -----> TODO what's this parameter?""" - 
self.sectors_dir = sectors_dir - """Sectors directory.""" - self.output_dir = output_dir - """Outputs directory.""" - self.dims = ("commodity", "region", "year", "timeslice") - """Order of the input and output dimensions.""" - self.calibrated = False - """Flag if the sector has gone through the calibration process.""" - - def next(self, market: Dataset) -> Dataset: - """Adapter between the old and the new.""" - from muse_legacy.sectors.sector import Demand - - self.commodity_price.loc[{"year": market.year}] = market.prices - - # Consumption in Conversion and Supply sectors depend on the static trade - # TODO This might need to go outside, in the MCA since it will affect all - # sectors, not just the legacy ones. But static trade seems to be always zero, - # so not sure how useful it might be. - if not issubclass(type(self.old_sector), Demand): - consumption = ( - market.consumption - self.static_trade.sel(year=market.year) - ).clip(min=0.0) - else: - consumption = market.consumption.copy() - - converted = self.inputs( - consumption=consumption, supply=market.supply, prices=self.commodity_price - ) - - idx = int(np.argwhere(self.time_framework == market.year.values[0])) - - result = self.runprocessmodule( - converted.consumption, - converted.supplycost, - converted.supply, - (idx, market.year.values[0]), - ) - - result = self.outputs( - consumption=result.consumption, - supply=result.supply, - prices=result.supplycost, - ).sel(year=market.year) - - result["comm_usage"] = self.commodities[self.name].comm_usage - result = result.set_coords("comm_usage") - - # Prices in Demand sectors should not change. 
- if issubclass(type(self.old_sector), Demand): - result["prices"] = self.commodity_price.copy() - - return result - - def runprocessmodule(self, consumption, supplycost, supply, t): - params = [ - consumption, - supplycost, - supply, - new_to_old_timeslice(self.timeslices["prices"]), - new_to_old_timeslice( - self.timeslices["finest"], self.timeslices["finest aggregation"] - ), - t, - self.mode, - ] - - inputs = {"output_dir": self.output_dir, "sectors_dir": self.sectors_dir} - - if self.name == "Power": - if self.mode == "Calibration": - params += [self.market_iterative] - result = self.old_sector.power_calibration(*params, **inputs) - self.mode = "Iteration" - else: - self.mode = "Iteration" - params += [self.old_sector.instance, self.market_iterative, self.excess] - result = self.old_sector.runprocessmodule(*params, **inputs) - else: - params += [self.market_iterative, self.excess] - result = self.old_sector.runprocessmodule(*params, **inputs) - - self.old_sector.report(result, t[1], self.output_dir) - - return result - - @staticmethod - def load_timeslices_and_aggregation(timeslices, sectors) -> tuple[dict, str]: - """Loads all sector timeslices and finds the finest one.""" - timeslices = {"prices": timeslices.rename("prices timeslices")} - finest = timeslices["prices"].copy() - aggregation = "month" - - for sector in sectors.list: - sector_ts = read_csv_timeslices( - getattr(sectors, sector).timeslices_path - ).rename(sector + " timeslice") - timeslices[sector] = sector_ts - - # Now we get the finest - if len(finest) < len(sector_ts): - finest = timeslices[sector] - aggregation = getattr(sectors, sector).agregation_level - elif len(finest) == len(sector_ts) and any( - finest.get_index("timeslice") != sector_ts.get_index("timeslice") - ): - raise ValueError("Timeslice order do not match") - - timeslices["finest"] = finest - timeslices["finest"] = timeslices["finest"].rename("finest timeslice") - - return timeslices, aggregation - - @property - def 
global_commodities(self): - """List of all commodities used by the MCA.""" - return self.commodities["global"].commodity.values - - @property - def sector_commodities(self): - """List of all commodities used by the Sector.""" - return self.commodities[self.name].commodity.values - - @property - def sector_timeslices(self): - """List of all commodities used by the MCA.""" - return self.timeslices[self.name] - - def _to(self, data: np.ndarray, data_ts, ts: pd.MultiIndex, qt: QuantityType): - """From ndarray to dataarray.""" - return ndarray_to_xarray( - years=self.time_framework, - data=data, - ts=ts, - qt=qt, - global_commodities=self.global_commodities, - sector_commodities=self.sector_commodities, - data_ts=data_ts, - dims=self.dims, - regions=self.regions, - ) - - def _from(self, xdata: DataArray, ts: pd.MultiIndex, qt: QuantityType): - """From dataarray to ndarray.""" - return xarray_to_ndarray( - years=self.time_framework, - xdata=xdata, - ts=ts, - qt=qt, - global_commodities=self.global_commodities, - dims=self.dims, - regions=self.regions, - ) - - def outputs( - self, consumption: np.ndarray, prices: np.ndarray, supply: np.ndarray - ) -> Dataset: - """Converts MUSE numpy outputs to xarray.""" - from muse.timeslices import QuantityType - - finest, prices_ts = self.timeslices["finest"], self.timeslices["prices"] - c = self._to(consumption, finest, prices_ts, QuantityType.EXTENSIVE) - s = self._to(supply, self.sector_timeslices, prices_ts, QuantityType.EXTENSIVE) - p = self._to(prices, self.sector_timeslices, prices_ts, QuantityType.INTENSIVE) - return Dataset({"consumption": c, "supply": s, "costs": p}) - - def inputs(self, consumption: DataArray, prices: DataArray, supply: DataArray): - """Converts xarray to MUSE numpy input arrays.""" - from muse_legacy.sectors.sector import Sector as OriginalSector - - MarketVars = OriginalSector.MarketVars - - finest, prices_ts = self.timeslices["finest"], self.timeslices["prices"] - c = self._from(consumption, finest, 
QuantityType.EXTENSIVE) - s = self._from(supply, finest, QuantityType.EXTENSIVE) - p = self._from(prices, prices_ts, QuantityType.INTENSIVE) - - return MarketVars(consumption=c, supply=s, supplycost=p) - - -def ndarray_to_xarray( - years: np.ndarray, - data: np.ndarray, - ts: pd.MultiIndex, - qt: QuantityType, - global_commodities: DataArray, - sector_commodities: DataArray, - data_ts: pd.MultiIndex, - dims: Sequence[str], - regions: Sequence[str], -) -> DataArray: - """From ndarray to dataarray.""" - from collections.abc import Hashable, Mapping - - from muse.timeslices import convert_timeslice - - coords: Mapping[Hashable, Any] = { - "year": years, - "commodity": global_commodities, - "region": regions, - "timeslice": data_ts, - } - result = convert_timeslice(DataArray(data, coords=coords, dims=dims), ts, qt) - assert isinstance(result, DataArray) - return result.sel(commodity=sector_commodities).transpose(*dims) - - -def xarray_to_ndarray( - years: np.ndarray, - xdata: DataArray, - ts: pd.MultiIndex, - qt: QuantityType, - global_commodities: DataArray, - dims: Sequence[str], - regions: Sequence[str], -) -> np.ndarray: - """From dataarray to ndarray.""" - from collections.abc import Hashable, Mapping - - from muse.timeslices import convert_timeslice - - coords: Mapping[Hashable, Any] = { - "year": years, - "commodity": global_commodities, - "region": regions, - "timeslice": ts, - } - warp = np.zeros((len(global_commodities), len(regions), len(years), len(ts))) - result = DataArray(warp, coords=coords, dims=dims) - result.loc[{"year": xdata.year}] = convert_timeslice(xdata, ts, qt).transpose(*dims) - - return result.values - - -def commodities_idx(sector, comm: str) -> Sequence: - """Gets the indices of the commodities involved in the processes of the sector. 
- - Arguments: - sector: The old MUSE sector of interest - comm: Either "OUT", "IN" or "NotENV" - - Returns: - A list with the indexes - """ - comm = { - "OUT": "listIndexCommoditiesOUT", - "IN": "listIndexCommoditiesIN", - "NotENV": "listIndexNotEnvironmental", - }[comm] - - comm_list = chain.from_iterable( - chain.from_iterable( - [[c for c in p.__dict__[comm]] for p in wp.processes + wp.OtherProcesses] - for wp in sector - ) - ) - - return list({item for item in comm_list}) From 1518034180c3f42d8081d0cfe16255fdacf09ecb Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 4 Oct 2024 15:05:35 +0100 Subject: [PATCH 12/92] Delete tests and documentation --- docs/api.rst | 5 - docs/source/muse.sectors.rst | 8 -- src/muse/__init__.py | 1 - src/muse/sectors/__init__.py | 3 - tests/conftest.py | 5 +- tests/test_legacy_sector.py | 183 ----------------------------------- 6 files changed, 1 insertion(+), 204 deletions(-) delete mode 100644 tests/test_legacy_sector.py diff --git a/docs/api.rst b/docs/api.rst index 1d1828be3..0d47458d6 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -55,11 +55,6 @@ PresetSector .. autoclass:: muse.sectors.preset_sector.PresetSector :members: -LegacySector -~~~~~~~~~~~~ - -.. autoclass:: muse.sectors.legacy_sector.LegacySector - :members: Production ~~~~~~~~~~ diff --git a/docs/source/muse.sectors.rst b/docs/source/muse.sectors.rst index ed2f2fab3..c6f0db570 100644 --- a/docs/source/muse.sectors.rst +++ b/docs/source/muse.sectors.rst @@ -12,14 +12,6 @@ muse.sectors.abstract module :undoc-members: :show-inheritance: -muse.sectors.legacy\_sector module ----------------------------------- - -.. 
automodule:: muse.sectors.legacy_sector - :members: - :undoc-members: - :show-inheritance: - muse.sectors.preset\_sector module ---------------------------------- diff --git a/src/muse/__init__.py b/src/muse/__init__.py index dff12e09a..ee6182dcc 100644 --- a/src/muse/__init__.py +++ b/src/muse/__init__.py @@ -59,6 +59,5 @@ def _create_logger(color: bool = True): "objectives", "outputs", "sectors", - "legacy_sectors", VERSION, ] diff --git a/src/muse/sectors/__init__.py b/src/muse/sectors/__init__.py index 94370517b..822e91e9a 100644 --- a/src/muse/sectors/__init__.py +++ b/src/muse/sectors/__init__.py @@ -7,8 +7,6 @@ investing in new assets. - :class:`~muse.sectors.preset_sector.PresetSector`: A sector that is meant to generate demand for the sectors above using a fixed formula or schedule. -- :class:`~muse.sectors.legacy_sector.LegacySector`: A wrapper around the original MUSE - sectors. All the sectors derive from :class:`AbstractSector`. The :class:`AbstractSector` defines two `abstract`__ functions which should be declared by derived sectors. `Abstract`__ @@ -38,7 +36,6 @@ "SECTORS_REGISTERED", ] from muse.sectors.abstract import AbstractSector -from muse.sectors.legacy_sector import LegacySector from muse.sectors.preset_sector import PresetSector from muse.sectors.register import SECTORS_REGISTERED, register_sector from muse.sectors.sector import Sector diff --git a/tests/conftest.py b/tests/conftest.py index 0443eb8a3..efc0e38f8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -589,11 +589,8 @@ def drop_optionals(settings): def warnings_as_errors(request): from warnings import simplefilter - # disable fixture for some tests using legacy sectors. 
+ # disable fixture for some tests if ( - request.module.__name__ == "test_legacy_sector" - and request.node.name.startswith("test_legacy_sector_regression[") - ) or ( request.module.__name__ == "test_outputs" and request.node.name == "test_save_with_fullpath_to_excel_with_sink" ): diff --git a/tests/test_legacy_sector.py b/tests/test_legacy_sector.py deleted file mode 100644 index 6b9aaca43..000000000 --- a/tests/test_legacy_sector.py +++ /dev/null @@ -1,183 +0,0 @@ -from pathlib import Path -from typing import Optional - -from pytest import approx, mark - - -def legacy_inputs(): - try: - import muse_legacy - except ImportError: - return [] - - from muse_legacy.sectors import SECTORS - - excluded = { - "Bioenergy", - "Commercial", - "Industry", - "NET", - "Refinery", - "Residential", - "IndustryABM", - "Sequestration", - "TradeSupply", - "TradeRefinery", - "TradePower", - "Transport", - "Shipping", - "Supply", - "Power", - } - - return [ - ( - sector, - Path(muse_legacy.__file__).parent - / "data" - / "test" - / "cases" - / sector - / f"settings_legacy_{sector.lower()}.toml", - ) - for sector in set(SECTORS) - excluded - ] - - -def legacy_input_file(sector: str) -> Optional[Path]: - """Gets the legacy sector settings file.""" - input_file = ( - Path(__file__).parent - / "data" - / "cases" - / sector - / f"settings_legacy_{sector.lower()}.toml" - ) - - return input_file - - -def update_settings(settings, sec_dir, out_dir): - """Updates a settings namedtuple with temporal sectors and output directories.""" - sectors = settings.sectors - - for s in sectors.list: - path = Path(sec_dir) / s - sector = getattr(sectors, s)._replace( - userdata_path=path, technodata_path=path, output_path=out_dir - ) - sectors = sectors._replace(**{s: sector}) - - return settings._replace(sectors=sectors) - - -@mark.legacy -@mark.sgidata -@mark.parametrize("sector,filepath", legacy_inputs()) -def test_legacy_sector_creation(sector, filepath): - """Test the creation of the legacy 
sectors.""" - from muse.readers import read_settings - from muse.sectors import SECTORS_REGISTERED - - settings = read_settings(filepath) - - SECTORS_REGISTERED["legacy"](name=sector, settings=settings) - - -def test_xarray_to_array(market): - import numpy as np - - from muse.sectors.legacy_sector import xarray_to_ndarray - from muse.timeslices import QuantityType - - dims = ("commodity", "region", "year", "timeslice") - arr = xarray_to_ndarray( - years=market.year, - xdata=market.supply, - ts=market.timeslice, - qt=QuantityType.EXTENSIVE, - global_commodities=market.commodity, - dims=dims, - regions=np.array(market.region), - ) - - assert arr == approx(market.supply.transpose(*dims).values) - - -def test_array_to_xarray(market): - from numpy import array - from xarray import broadcast - - from muse.sectors.legacy_sector import ndarray_to_xarray - from muse.timeslices import QuantityType - - dims = ("commodity", "region", "year", "timeslice") - arr = market.supply.transpose(*dims).values - result = ndarray_to_xarray( - years=market.year, - data=arr, - ts=market.timeslice, - qt=QuantityType.EXTENSIVE, - global_commodities=market.commodity, - sector_commodities=market.commodity, - data_ts=market.timeslice, - dims=dims, - regions=array(market.region), - ) - - expected, actual = broadcast(market.supply, result) - assert actual.values == approx(expected.values) - - -def test_round_trip(market): - from numpy import array - from xarray import broadcast - - from muse.sectors.legacy_sector import ndarray_to_xarray, xarray_to_ndarray - from muse.timeslices import QuantityType - - dims = ("commodity", "region", "year", "timeslice") - - arr = xarray_to_ndarray( - years=market.year, - xdata=market.supply, - ts=market.timeslice, - qt=QuantityType.EXTENSIVE, - global_commodities=market.commodity, - dims=dims, - regions=array(market.region), - ) - - result = ndarray_to_xarray( - years=market.year, - data=arr, - ts=market.timeslice, - qt=QuantityType.EXTENSIVE, - 
global_commodities=market.commodity, - sector_commodities=market.commodity, - data_ts=market.timeslice, - dims=dims, - regions=array(market.region), - ) - - expected, actual = broadcast(market.supply, result) - assert actual.values == approx(expected.values) - - -@mark.legacy -@mark.sgidata -@mark.regression -@mark.parametrize("sector,filepath", legacy_inputs()) -def test_legacy_sector_regression(sector, filepath, sectors_dir, tmpdir, compare_dirs): - """Test the execution of the next method in the legacy sectors for 1 year.""" - from muse.mca import MCA - from muse.readers import read_settings - - settings = read_settings(filepath) - settings = update_settings(settings, sectors_dir, tmpdir) - - mca = MCA.factory(settings) - mca.run() - - regression_dir = filepath.parent - compare_dirs(tmpdir, regression_dir / "output") From 3b0cb49bd6e1df925406f825c4e465fb6ed2f706 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 4 Oct 2024 15:51:45 +0100 Subject: [PATCH 13/92] Remove more redundant code --- src/muse/demand_share.py | 34 ++++++++------------------- src/muse/sectors/sector.py | 47 ++++++++++++++++---------------------- 2 files changed, 30 insertions(+), 51 deletions(-) diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index a5187c4fe..9c7e42120 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -112,9 +112,9 @@ def new_and_retro( agents: Sequence[AbstractAgent], market: xr.Dataset, technologies: xr.Dataset, + current_year: int, + forecast: int, production: Union[str, Mapping, Callable] = "maximum_production", - current_year: Optional[int] = None, - forecast: int = 5, ) -> xr.DataArray: r"""Splits demand across new and retro agents. 
@@ -236,9 +236,6 @@ def decommissioning(capacity): technologies, capacity, year=[current_year, current_year + forecast] ).squeeze("year") - if current_year is None: - current_year = market.year.min() - capacity = reduce_assets([u.assets.capacity for u in agents]) demands = new_and_retro_demands( @@ -323,9 +320,9 @@ def standard_demand( agents: Sequence[AbstractAgent], market: xr.Dataset, technologies: xr.Dataset, + current_year: int, + forecast: int, production: Union[str, Mapping, Callable] = "maximum_production", - current_year: Optional[int] = None, - forecast: int = 5, ) -> xr.DataArray: r"""Splits demand across new agents. @@ -360,9 +357,6 @@ def decommissioning(capacity): technologies, capacity, year=[current_year, current_year + forecast] ).squeeze("year") - if current_year is None: - current_year = market.year.min() - # Make sure there are no retrofit agents for agent in agents: if agent.category == "retrofit": @@ -433,18 +427,15 @@ def unmet_forecasted_demand( agents: Sequence[AbstractAgent], market: xr.Dataset, technologies: xr.Dataset, - current_year: Optional[int] = None, + current_year: int, + forecast: int, production: Union[str, Mapping, Callable] = "maximum_production", - forecast: int = 5, ) -> xr.DataArray: """Forecast demand that cannot be serviced by non-decommissioned current assets.""" from muse.commodities import is_enduse from muse.timeslices import QuantityType, convert_timeslice from muse.utilities import reduce_assets - if current_year is None: - current_year = market.year.min() - year = current_year + forecast comm_usage = technologies.comm_usage.sel(commodity=market.commodity) smarket: xr.Dataset = market.where(is_enduse(comm_usage), 0).interp(year=year) @@ -555,8 +546,8 @@ def new_consumption( capacity: xr.DataArray, market: xr.Dataset, technologies: xr.Dataset, - current_year: Optional[int] = None, - forecast: int = 5, + current_year: int, + forecast: int, ) -> xr.DataArray: r"""Computes share of the demand attributed to new agents. 
@@ -576,9 +567,6 @@ def new_consumption( from muse.timeslices import QuantityType, convert_timeslice - if current_year is None: - current_year = market.year.min() - # Interpolate market to forecast year market = market.interp(year=[current_year, current_year + forecast]) current = market.sel(year=current_year, drop=True) @@ -605,9 +593,9 @@ def new_and_retro_demands( capacity: xr.DataArray, market: xr.Dataset, technologies: xr.Dataset, + current_year: int, + forecast: int, production: Union[str, Mapping, Callable] = "maximum_production", - current_year: Optional[int] = None, - forecast: int = 5, ) -> xr.Dataset: """Splits demand into *new* and *retrofit* demand. @@ -626,8 +614,6 @@ def new_and_retro_demands( production_method = production if callable(production) else prod_factory(production) assert callable(production_method) - if current_year is None: - current_year = market.year.min() # Interpolate market to forecast year smarket: xr.Dataset = market.interp(year=[current_year, current_year + forecast]) diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index 31e04dae5..d5c2b517c 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -174,30 +174,18 @@ def forecast(self): If no agents with a "forecast" attribute are found, defaults to 5. It cannot be lower than 1 year. """ - forecasts = [ - getattr(agent, "forecast") - for agent in self.agents - if hasattr(agent, "forecast") - ] - if len(forecasts) == 0: - return 5 + forecasts = [getattr(agent, "forecast") for agent in self.agents] return max(1, max(forecasts)) def next( self, mca_market: xr.Dataset, - time_period: int | None = None, - current_year: int | None = None, ) -> xr.Dataset: """Advance sector by one time period. Args: mca_market: Market with ``demand``, ``supply``, and ``prices``. - time_period: - Length of the time period in the framework. Defaults to the range of - ``mca_market.year``. 
- current_year: Current year of the simulation Returns: A market containing the ``supply`` offered by the sector, it's attendant @@ -208,10 +196,8 @@ def next( def group_assets(x: xr.DataArray) -> xr.DataArray: return xr.Dataset(dict(x=x)).groupby("region").sum("asset").x - if time_period is None: - time_period = int(mca_market.year.max() - mca_market.year.min()) - if current_year is None: - current_year = int(mca_market.year.min()) + time_period = int(mca_market.year.max() - mca_market.year.min()) + current_year = int(mca_market.year.min()) getLogger(__name__).info(f"Running {self.name} for year {current_year}") # Agent interactions @@ -347,6 +333,8 @@ def capacity(self) -> xr.DataArray: for u in self.agents if "dst_region" not in u.assets.capacity.dims ] + + # Only nontraded assets if not traded: full_list = [ list(nontraded[i].year.values) @@ -361,7 +349,9 @@ def capacity(self) -> xr.DataArray: if "dst_region" not in u.assets.capacity.dims ] return reduce_assets(nontraded) - if not nontraded: + + # Only traded assets + elif not nontraded: full_list = [ list(traded[i].year.values) for i in range(len(traded)) @@ -375,15 +365,18 @@ def capacity(self) -> xr.DataArray: if "dst_region" in u.assets.capacity.dims ] return reduce_assets(traded) - traded_results = reduce_assets(traded) - nontraded_results = reduce_assets(nontraded) - return reduce_assets( - [ - traded_results, - nontraded_results - * (nontraded_results.region == traded_results.dst_region), - ] - ) + + # Both traded and nontraded assets + else: + traded_results = reduce_assets(traded) + nontraded_results = reduce_assets(nontraded) + return reduce_assets( + [ + traded_results, + nontraded_results + * (nontraded_results.region == traded_results.dst_region), + ] + ) @property def agents(self) -> Iterator[AbstractAgent]: From 3a053440e517eec2407992a2a0b967f2f10a9c0a Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 4 Oct 2024 15:56:28 +0100 Subject: [PATCH 14/92] Delete new_to_old_timeslice function --- 
src/muse/readers/__init__.py | 11 ----------- src/muse/timeslices.py | 20 -------------------- 2 files changed, 31 deletions(-) diff --git a/src/muse/readers/__init__.py b/src/muse/readers/__init__.py index 631cecdaf..930dd43ef 100644 --- a/src/muse/readers/__init__.py +++ b/src/muse/readers/__init__.py @@ -19,14 +19,3 @@ def camel_to_snake(name: str) -> str: result = result.replace("n2_o", "N2O") result = result.replace("f-gases", "F-gases") return result - - -def kebab_to_camel(string): - return "".join(x.capitalize() for x in string.split("-")) - - -def snake_to_kebab(string: str) -> str: - from re import sub - - result = sub(r"((?<=[a-z])[A-Z]|(? dict: - """Transforms timeslices defined as DataArray to a pandas dataframe. - - This function is used in the LegacySector class to adapt the new MCA timeslices to - the format required by the old sectors. - """ - length = len(ts.month.values) - converted_ts = { - "Month": [kebab_to_camel(w) for w in ts.month.values], - "Day": [kebab_to_camel(w) for w in ts.day.values], - "Hour": [kebab_to_camel(w) for w in ts.hour.values], - "RepresentHours": list(ts.represent_hours.values.astype(float)), - "SN": list(range(1, length + 1)), - "AgLevel": [ag_level] * length, - } - return converted_ts - - def represent_hours( timeslices: DataArray, nhours: Union[int, float] = 8765.82 ) -> DataArray: From 977647d39838594caff9a927b1ec04ae1fa5709f Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Tue, 8 Oct 2024 15:20:39 +0100 Subject: [PATCH 15/92] Remove unnecessary convert_timeslice operations --- src/muse/sectors/preset_sector.py | 12 ++---------- src/muse/sectors/sector.py | 19 +++++++++---------- 2 files changed, 11 insertions(+), 20 deletions(-) diff --git a/src/muse/sectors/preset_sector.py b/src/muse/sectors/preset_sector.py index 7a35bc2a3..4d61eff2b 100644 --- a/src/muse/sectors/preset_sector.py +++ b/src/muse/sectors/preset_sector.py @@ -151,21 +151,13 @@ def __init__( def next(self, mca_market: Dataset) -> Dataset: """Advance 
sector by one time period.""" - from muse.timeslices import QuantityType, convert_timeslice - presets = self.presets.sel(region=mca_market.region) supply = self._interpolate(presets.supply, mca_market.year) consumption = self._interpolate(presets.consumption, mca_market.year) costs = self._interpolate(presets.costs, mca_market.year) - result = convert_timeslice( - Dataset({"supply": supply, "consumption": consumption}), - mca_market.timeslice, - QuantityType.EXTENSIVE, - ) - result["costs"] = drop_timeslice( - convert_timeslice(costs, mca_market.timeslice, QuantityType.INTENSIVE) - ) + result = Dataset({"supply": supply, "consumption": consumption}) + result["costs"] = drop_timeslice(costs) assert isinstance(result, Dataset) return result diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index d5c2b517c..f61e546e0 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -397,15 +397,14 @@ def convert_market_timeslice( intensive = (intensive,) timesliced = {d for d in market.data_vars if "timeslice" in market[d].dims} - intensives = convert_timeslice( - market[list(timesliced.intersection(intensive))], - timeslice, - QuantityType.INTENSIVE, - ) - extensives = convert_timeslice( - market[list(timesliced.difference(intensives.data_vars))], - timeslice, - QuantityType.EXTENSIVE, - ) + + intensives = market[list(timesliced.intersection(intensive))] + if "timeslice" not in intensives.dims: + intensives = convert_timeslice( + intensives, + timeslice, + QuantityType.INTENSIVE, + ) + extensives = market[list(timesliced.difference(intensives.data_vars))] others = market[list(set(market.data_vars).difference(timesliced))] return xr.merge([intensives, extensives, others]) From 647d3fe14a99ae24106be9ebb7fe2ec660b66688 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Tue, 8 Oct 2024 15:57:37 +0100 Subject: [PATCH 16/92] Use global TIMESLICE variable throughout --- src/muse/constraints.py | 12 ++++++------ src/muse/costs.py | 14 +++++++------- 
src/muse/demand_share.py | 12 ++++++------ src/muse/examples.py | 4 ++-- src/muse/investments.py | 4 ++-- src/muse/objectives.py | 16 ++++++++-------- src/muse/outputs/mca.py | 24 ++++++++---------------- src/muse/quantities.py | 16 ++++++++-------- src/muse/readers/csv.py | 8 ++++---- src/muse/sectors/preset_sector.py | 4 ++-- src/muse/sectors/sector.py | 8 ++++---- 11 files changed, 57 insertions(+), 65 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 74a55b843..3f3b96ec3 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -446,7 +446,7 @@ def max_production( from xarray import ones_like, zeros_like from muse.commodities import is_enduse - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice if year is None: year = int(market.year.min()) @@ -467,7 +467,7 @@ def max_production( ) capacity = convert_timeslice( techs.fixed_outputs * techs.utilization_factor, - market.timeslice, + TIMESLICE, QuantityType.EXTENSIVE, ) if "asset" not in capacity.dims and "asset" in search_space.dims: @@ -728,7 +728,7 @@ def minimum_service( from xarray import ones_like, zeros_like from muse.commodities import is_enduse - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice if "minimum_service_factor" not in technologies.data_vars: return None @@ -753,7 +753,7 @@ def minimum_service( ) capacity = convert_timeslice( techs.fixed_outputs * techs.minimum_service_factor, - market.timeslice, + TIMESLICE, QuantityType.EXTENSIVE, ) if "asset" not in capacity.dims: @@ -819,11 +819,11 @@ def lp_costs( from xarray import zeros_like from muse.commodities import is_enduse - from muse.timeslices import convert_timeslice + from muse.timeslices import TIMESLICE, convert_timeslice assert "year" not in technologies.dims - ts_costs = convert_timeslice(costs, timeslices) + ts_costs = 
convert_timeslice(costs, TIMESLICE) selection = dict( commodity=is_enduse(technologies.comm_usage), technology=technologies.technology.isin(costs.replacement), diff --git a/src/muse/costs.py b/src/muse/costs.py index 10ef893d4..ca0c7e213 100644 --- a/src/muse/costs.py +++ b/src/muse/costs.py @@ -13,7 +13,7 @@ from muse.commodities import is_enduse, is_fuel, is_material, is_pollutant from muse.quantities import consumption -from muse.timeslices import QuantityType, convert_timeslice +from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice from muse.utilities import filter_input @@ -98,7 +98,7 @@ def net_present_value( # Cost of installed capacity installed_capacity_costs = convert_timeslice( techs.cap_par * (capacity**techs.cap_exp), - prices.timeslice, + TIMESLICE, QuantityType.EXTENSIVE, ) @@ -122,7 +122,7 @@ def net_present_value( # Fixed and Variable costs fixed_costs = convert_timeslice( techs.fix_par * (capacity**techs.fix_exp), - prices.timeslice, + TIMESLICE, QuantityType.EXTENSIVE, ) variable_costs = techs.var_par * ( @@ -262,7 +262,7 @@ def lifetime_levelized_cost_of_energy( # Cost of installed capacity installed_capacity_costs = convert_timeslice( techs.cap_par * (capacity**techs.cap_exp), - prices.timeslice, + TIMESLICE, QuantityType.EXTENSIVE, ) @@ -286,7 +286,7 @@ def lifetime_levelized_cost_of_energy( # Fixed and Variable costs fixed_costs = convert_timeslice( techs.fix_par * (capacity**techs.fix_exp), - prices.timeslice, + TIMESLICE, QuantityType.EXTENSIVE, ) variable_costs = ( @@ -374,7 +374,7 @@ def annual_levelized_cost_of_energy( annualized_capital_costs = ( convert_timeslice( techs.cap_par * rates, - prices.timeslice, + TIMESLICE, QuantityType.EXTENSIVE, ) / techs.utilization_factor @@ -383,7 +383,7 @@ def annual_levelized_cost_of_energy( o_and_e_costs = ( convert_timeslice( (techs.fix_par + techs.var_par), - prices.timeslice, + TIMESLICE, QuantityType.EXTENSIVE, ) / techs.utilization_factor diff --git a/src/muse/demand_share.py 
b/src/muse/demand_share.py index 9c7e42120..bc848b627 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -433,7 +433,7 @@ def unmet_forecasted_demand( ) -> xr.DataArray: """Forecast demand that cannot be serviced by non-decommissioned current assets.""" from muse.commodities import is_enduse - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice from muse.utilities import reduce_assets year = current_year + forecast @@ -442,7 +442,7 @@ def unmet_forecasted_demand( capacity = reduce_assets([u.assets.capacity.interp(year=year) for u in agents]) ts_capacity = cast( xr.DataArray, - convert_timeslice(capacity, market.timeslice, QuantityType.EXTENSIVE), + convert_timeslice(capacity, TIMESLICE, QuantityType.EXTENSIVE), ) result = unmet_demand(smarket, ts_capacity, technologies, production) @@ -565,7 +565,7 @@ def new_consumption( """ from numpy import minimum - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice # Interpolate market to forecast year market = market.interp(year=[current_year, current_year + forecast]) @@ -578,7 +578,7 @@ def new_consumption( # Capacity in the forecast year ts_capa = convert_timeslice( capacity.interp(year=current_year + forecast), - market.timeslice, + TIMESLICE, QuantityType.EXTENSIVE, ) assert isinstance(ts_capa, xr.DataArray) @@ -610,7 +610,7 @@ def new_and_retro_demands( from numpy import minimum from muse.production import factory as prod_factory - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice production_method = production if callable(production) else prod_factory(production) assert callable(production_method) @@ -621,7 +621,7 @@ def new_and_retro_demands( # Split capacity between timeslices ts_capa = convert_timeslice( capacity.interp(year=[current_year, current_year + 
forecast]), - market.timeslice, + TIMESLICE, QuantityType.EXTENSIVE, ) assert isinstance(ts_capa, xr.DataArray) diff --git a/src/muse/examples.py b/src/muse/examples.py index e75823db7..ad35798e3 100644 --- a/src/muse/examples.py +++ b/src/muse/examples.py @@ -242,7 +242,7 @@ def matching_market(sector: str, model: str = "default") -> xr.Dataset: from muse.examples import sector as load_sector from muse.quantities import consumption, maximum_production from muse.sectors import Sector - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice from muse.utilities import agent_concatenation loaded_sector = cast(Sector, load_sector(sector, model)) @@ -253,7 +253,7 @@ def matching_market(sector: str, model: str = "default") -> xr.Dataset: xr.DataArray, convert_timeslice( maximum_production(loaded_sector.technologies, assets.capacity), - loaded_sector.timeslices, + TIMESLICE, QuantityType.EXTENSIVE, ), ) diff --git a/src/muse/investments.py b/src/muse/investments.py index 19598f496..cdae91d27 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -247,7 +247,7 @@ def adhoc_match_demand( ) -> xr.DataArray: from muse.demand_matching import demand_matching from muse.quantities import capacity_in_use, maximum_production - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice demand = next(c for c in constraints if c.name == "demand").b @@ -260,7 +260,7 @@ def adhoc_match_demand( commodity=demand.commodity, ).drop_vars("technology") if "timeslice" in demand.dims and "timeslice" not in max_prod.dims: - max_prod = convert_timeslice(max_prod, demand, QuantityType.EXTENSIVE) + max_prod = convert_timeslice(max_prod, TIMESLICE, QuantityType.EXTENSIVE) # Push disabled techs to last rank. # Any production assigned to them by the demand-matching algorithm will be removed. 
diff --git a/src/muse/objectives.py b/src/muse/objectives.py index 13639bca9..ef0b88691 100644 --- a/src/muse/objectives.py +++ b/src/muse/objectives.py @@ -388,11 +388,11 @@ def lifetime_levelized_cost_of_energy( due to a zero utilisation factor. """ from muse.costs import lifetime_levelized_cost_of_energy as LCOE - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice capacity = capacity_to_service_demand(technologies, demand) production = capacity * technologies.fixed_outputs * technologies.utilization_factor - production = convert_timeslice(production, demand.timeslice, QuantityType.EXTENSIVE) + production = convert_timeslice(production, TIMESLICE, QuantityType.EXTENSIVE) results = LCOE( technologies=technologies, @@ -418,11 +418,11 @@ def net_present_value( See :py:func:`muse.costs.net_present_value` for more details. """ from muse.costs import net_present_value as NPV - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice capacity = capacity_to_service_demand(technologies, demand) production = capacity * technologies.fixed_outputs * technologies.utilization_factor - production = convert_timeslice(production, demand.timeslice, QuantityType.EXTENSIVE) + production = convert_timeslice(production, TIMESLICE, QuantityType.EXTENSIVE) results = NPV( technologies=technologies, @@ -447,11 +447,11 @@ def net_present_cost( See :py:func:`muse.costs.net_present_cost` for more details. 
""" from muse.costs import net_present_cost as NPC - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice capacity = capacity_to_service_demand(technologies, demand) production = capacity * technologies.fixed_outputs * technologies.utilization_factor - production = convert_timeslice(production, demand.timeslice, QuantityType.EXTENSIVE) + production = convert_timeslice(production, TIMESLICE, QuantityType.EXTENSIVE) results = NPC( technologies=technologies, @@ -476,11 +476,11 @@ def equivalent_annual_cost( See :py:func:`muse.costs.equivalent_annual_cost` for more details. """ from muse.costs import equivalent_annual_cost as EAC - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice capacity = capacity_to_service_demand(technologies, demand) production = capacity * technologies.fixed_outputs * technologies.utilization_factor - production = convert_timeslice(production, demand.timeslice, QuantityType.EXTENSIVE) + production = convert_timeslice(production, TIMESLICE, QuantityType.EXTENSIVE) results = EAC( technologies=technologies, diff --git a/src/muse/outputs/mca.py b/src/muse/outputs/mca.py index adabf600f..3fe34aae2 100644 --- a/src/muse/outputs/mca.py +++ b/src/muse/outputs/mca.py @@ -35,7 +35,7 @@ def quantity( from muse.outputs.sector import market_quantity from muse.registration import registrator from muse.sectors import AbstractSector -from muse.timeslices import QuantityType, convert_timeslice, drop_timeslice +from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice, drop_timeslice from muse.utilities import multiindex_to_coords OUTPUT_QUANTITY_SIGNATURE = Callable[ @@ -334,7 +334,6 @@ def sector_supply(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.Da if len(techs) > 0: for a in agents: output_year = a.year - a.forecast - capacity = a.filter_input(a.assets.capacity, 
year=output_year).fillna(0.0) technologies = a.filter_input(techs, year=output_year).fillna(0.0) agent_market = market.sel(year=output_year).copy() agent_market["consumption"] = drop_timeslice( @@ -353,7 +352,7 @@ def sector_supply(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.Da result = convert_timeslice( supply( agent_market, - capacity, + TIMESLICE, technologies, ), agent_market["consumption"].timeslice, @@ -566,7 +565,6 @@ def sector_consumption( if len(techs) > 0: for a in agents: output_year = a.year - a.forecast - capacity = a.filter_input(a.assets.capacity, year=output_year).fillna(0.0) technologies = a.filter_input(techs, year=output_year).fillna(0.0) agent_market = market.sel(year=output_year).copy() agent_market["consumption"] = drop_timeslice( @@ -585,7 +583,7 @@ def sector_consumption( production = convert_timeslice( supply( agent_market, - capacity, + TIMESLICE, technologies, ), agent_market["consumption"].timeslice, @@ -719,15 +717,10 @@ def sector_fuel_costs( ) commodity = is_fuel(technologies.comm_usage) - capacity = a.filter_input( - a.assets.capacity, - year=output_year, - ).fillna(0.0) - production = convert_timeslice( supply( agent_market, - capacity, + TIMESLICE, technologies, ), agent_market["consumption"].timeslice, @@ -775,7 +768,6 @@ def sector_capital_costs( if len(technologies) > 0: for a in agents: - demand = market.consumption * a.quantity output_year = a.year - a.forecast capacity = a.filter_input(a.assets.capacity, year=output_year).fillna(0.0) data = a.filter_input( @@ -786,7 +778,7 @@ def sector_capital_costs( result = data.cap_par * (capacity**data.cap_exp) data_agent = convert_timeslice( result, - demand.timeslice, + TIMESLICE, QuantityType.EXTENSIVE, ) data_agent["agent"] = a.name @@ -848,7 +840,7 @@ def sector_emission_costs( production = convert_timeslice( supply( agent_market, - capacity, + TIMESLICE, technologies, ), agent_market["consumption"].timeslice, @@ -921,7 +913,7 @@ def sector_lcoe(sector: 
AbstractSector, market: xr.Dataset, **kwargs) -> pd.Data production = capacity * techs.fixed_outputs * techs.utilization_factor production = convert_timeslice( production, - demand.timeslice, + TIMESLICE, QuantityType.EXTENSIVE, ) @@ -999,7 +991,7 @@ def sector_eac(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.DataF production = capacity * techs.fixed_outputs * techs.utilization_factor production = convert_timeslice( production, - demand.timeslice, + TIMESLICE, QuantityType.EXTENSIVE, ) diff --git a/src/muse/quantities.py b/src/muse/quantities.py index 66df60c39..561656468 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -150,7 +150,7 @@ def gross_margin( - non-environmental commodities OUTPUTS are related to revenues. """ from muse.commodities import is_enduse, is_pollutant - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice from muse.utilities import broadcast_techs tech = broadcast_techs( # type: ignore @@ -189,7 +189,7 @@ def gross_margin( # Variable costs depend on factors such as labour variable_costs = convert_timeslice( var_par * ((fixed_outputs.sel(commodity=enduses)).sum("commodity")) ** var_exp, - prices.timeslice, + TIMESLICE, QuantityType.EXTENSIVE, ) @@ -269,7 +269,7 @@ def consumption( are not given, then flexible consumption is *not* considered. 
""" from muse.commodities import is_enduse, is_fuel - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice from muse.utilities import filter_with_template params = filter_with_template( @@ -284,7 +284,7 @@ def consumption( if prices is not None and "timeslice" in prices.dims: production = convert_timeslice( # type: ignore - production, prices, QuantityType.EXTENSIVE + production, TIMESLICE, QuantityType.EXTENSIVE ) params_fuels = is_fuel(params.comm_usage) @@ -380,7 +380,7 @@ def demand_matched_production( """ from muse.costs import annual_levelized_cost_of_energy as ALCOE from muse.demand_matching import demand_matching - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice from muse.utilities import broadcast_techs technodata = cast(xr.Dataset, broadcast_techs(technologies, capacity)) @@ -389,7 +389,7 @@ def demand_matched_production( assert ("timeslice" in demand.dims) == ("timeslice" in cost.dims) if "timeslice" in demand.dims and "timeslice" not in max_production.dims: max_production = convert_timeslice( - max_production, demand.timeslice, QuantityType.EXTENSIVE + max_production, TIMESLICE, QuantityType.EXTENSIVE ) return demand_matching(demand, cost, max_production) @@ -459,7 +459,7 @@ def costed_production( service is applied first. 
""" from muse.quantities import maximum_production - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice from muse.utilities import broadcast_techs technodata = cast(xr.Dataset, broadcast_techs(technologies, capacity)) @@ -477,7 +477,7 @@ def group_assets(x: xr.DataArray) -> xr.DataArray: ranking = costs.rank("asset") maxprod = convert_timeslice( maximum_production(technodata, capacity), - demand.timeslice, + TIMESLICE, QuantityType.EXTENSIVE, ) commodity = (maxprod > 0).any([i for i in maxprod.dims if i != "commodity"]) diff --git a/src/muse/readers/csv.py b/src/muse/readers/csv.py index b8ab7fce9..591d2a308 100644 --- a/src/muse/readers/csv.py +++ b/src/muse/readers/csv.py @@ -637,14 +637,14 @@ def read_initial_market( """Read projections, import and export csv files.""" from logging import getLogger - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice # Projections must always be present if isinstance(projections, (str, Path)): getLogger(__name__).info(f"Reading projections from {projections}") projections = read_attribute_table(projections) if timeslices is not None: - projections = convert_timeslice(projections, timeslices, QuantityType.INTENSIVE) + projections = convert_timeslice(projections, TIMESLICE, QuantityType.INTENSIVE) # Base year export is optional. 
If it is not there, it's set to zero if isinstance(base_year_export, (str, Path)): @@ -664,10 +664,10 @@ def read_initial_market( if timeslices is not None: base_year_export = convert_timeslice( - base_year_export, timeslices, QuantityType.EXTENSIVE + base_year_export, TIMESLICE, QuantityType.EXTENSIVE ) base_year_import = convert_timeslice( - base_year_import, timeslices, QuantityType.EXTENSIVE + base_year_import, TIMESLICE, QuantityType.EXTENSIVE ) base_year_export.name = "exports" base_year_import.name = "imports" diff --git a/src/muse/sectors/preset_sector.py b/src/muse/sectors/preset_sector.py index 4d61eff2b..8539527c4 100644 --- a/src/muse/sectors/preset_sector.py +++ b/src/muse/sectors/preset_sector.py @@ -31,7 +31,7 @@ def factory(cls, name: str, settings: Any) -> PresetSector: read_timeslices, ) from muse.regressions import endogenous_demand - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice sector_conf = getattr(settings.sectors, name) presets = Dataset() @@ -122,7 +122,7 @@ def factory(cls, name: str, settings: Any) -> PresetSector: for component in {"supply", "consumption"}: if "timeslice" not in presets[component].dims: presets[component] = convert_timeslice( - presets[component], timeslice, QuantityType.EXTENSIVE + presets[component], TIMESLICE, QuantityType.EXTENSIVE ) comm_usage = (presets.costs > 0).any(set(presets.costs.dims) - {"commodity"}) diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index f61e546e0..0872240fa 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -285,7 +285,7 @@ def market_variables(self, market: xr.Dataset, technologies: xr.Dataset) -> Any: from muse.commodities import is_pollutant from muse.costs import annual_levelized_cost_of_energy, supply_cost from muse.quantities import consumption - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, 
QuantityType, convert_timeslice from muse.utilities import broadcast_techs years = market.year.values @@ -296,7 +296,7 @@ def market_variables(self, market: xr.Dataset, technologies: xr.Dataset) -> Any: market=market, capacity=capacity, technologies=technologies ) if "timeslice" in market.prices.dims and "timeslice" not in supply.dims: - supply = convert_timeslice(supply, market.timeslice, QuantityType.EXTENSIVE) + supply = convert_timeslice(supply, TIMESLICE, QuantityType.EXTENSIVE) # Calculate consumption consume = consumption(technologies, supply, market.prices) @@ -391,7 +391,7 @@ def convert_market_timeslice( intensive: str | tuple[str] = "prices", ) -> xr.Dataset: """Converts market from one to another timeslice.""" - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice if isinstance(intensive, str): intensive = (intensive,) @@ -402,7 +402,7 @@ def convert_market_timeslice( if "timeslice" not in intensives.dims: intensives = convert_timeslice( intensives, - timeslice, + TIMESLICE, QuantityType.INTENSIVE, ) extensives = market[list(timesliced.difference(intensives.data_vars))] From 8faf4680f8eea03487083d0ab3b4f6689eefbce4 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Tue, 8 Oct 2024 16:12:07 +0100 Subject: [PATCH 17/92] Simplify some other parts of the code accordingly --- src/muse/constraints.py | 11 ++++------- src/muse/examples.py | 1 - src/muse/investments.py | 6 +----- src/muse/mca.py | 1 - src/muse/readers/csv.py | 19 ++++++++----------- 5 files changed, 13 insertions(+), 25 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 3f3b96ec3..551b472cc 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -766,9 +766,7 @@ def minimum_service( ) -def lp_costs( - technologies: xr.Dataset, costs: xr.DataArray, timeslices: xr.DataArray -) -> xr.Dataset: +def lp_costs(technologies: xr.Dataset, costs: xr.DataArray) -> xr.Dataset: """Creates 
costs for solving with scipy's LP solver. Example: @@ -789,7 +787,7 @@ def lp_costs( >>> from muse.constraints import lp_costs >>> lpcosts = lp_costs( - ... technologies.sel(year=2020, region="R1"), costs, timeslices + ... technologies.sel(year=2020, region="R1"), costs ... ) >>> assert "capacity" in lpcosts.data_vars >>> assert "production" in lpcosts.data_vars @@ -1163,7 +1161,7 @@ class ScipyAdapter: In practice, :py:func:`~muse.constraints.lp_costs` helps us define the decision variables (and ``c``). We can verify that the sizes are consistent: - >>> lpcosts = cs.lp_costs(technologies, costs, market.timeslice) + >>> lpcosts = cs.lp_costs(technologies, costs) >>> capsize = lpcosts.capacity.size >>> prodsize = lpcosts.production.size >>> assert inputs.c.size == capsize + prodsize @@ -1198,10 +1196,9 @@ def factory( cls, technologies: xr.Dataset, costs: xr.DataArray, - timeslices: pd.Index, *constraints: Constraint, ) -> ScipyAdapter: - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) data = cls._unified_dataset(technologies, lpcosts, *constraints) diff --git a/src/muse/examples.py b/src/muse/examples.py index ad35798e3..ac592ce8e 100644 --- a/src/muse/examples.py +++ b/src/muse/examples.py @@ -190,7 +190,6 @@ def mca_market(model: str = "default") -> xr.Dataset: base_year_import=getattr( settings.global_input_files, "base_year_import", None ), - timeslices=settings.timeslices, ) .sel(region=settings.regions) .interp(year=settings.time_framework, method=settings.interpolation_mode) diff --git a/src/muse/investments.py b/src/muse/investments.py index cdae91d27..f98efbeda 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -308,8 +308,6 @@ def scipy_match_demand( if "timeslice" in costs.dims and timeslice_op is not None: costs = timeslice_op(costs) - timeslice = next(cs.timeslice for cs in constraints if "timeslice" in cs.dims) - # Select technodata for the current year if "year" in technologies.dims and 
year is None: raise ValueError("Missing year argument") @@ -319,9 +317,7 @@ def scipy_match_demand( techs = technologies # Run scipy optimization with highs solver - adapter = ScipyAdapter.factory( - techs, cast(np.ndarray, costs), timeslice, *constraints - ) + adapter = ScipyAdapter.factory(techs, cast(np.ndarray, costs), *constraints) res = linprog(**adapter.kwargs, method="highs") # Backup: try with highs-ipm diff --git a/src/muse/mca.py b/src/muse/mca.py index 7fa2cd481..aaadbf37d 100644 --- a/src/muse/mca.py +++ b/src/muse/mca.py @@ -57,7 +57,6 @@ def factory(cls, settings: str | Path | Mapping | Any) -> MCA: base_year_import=getattr( settings.global_input_files, "base_year_import", None ), - timeslices=settings.timeslices, ).sel(region=settings.regions) ).interp(year=settings.time_framework, method=settings.interpolation_mode) diff --git a/src/muse/readers/csv.py b/src/muse/readers/csv.py index 591d2a308..7e9ac44da 100644 --- a/src/muse/readers/csv.py +++ b/src/muse/readers/csv.py @@ -632,7 +632,6 @@ def read_initial_market( projections: Union[xr.DataArray, Path, str], base_year_import: Optional[Union[str, Path, xr.DataArray]] = None, base_year_export: Optional[Union[str, Path, xr.DataArray]] = None, - timeslices: Optional[xr.DataArray] = None, ) -> xr.Dataset: """Read projections, import and export csv files.""" from logging import getLogger @@ -643,8 +642,7 @@ def read_initial_market( if isinstance(projections, (str, Path)): getLogger(__name__).info(f"Reading projections from {projections}") projections = read_attribute_table(projections) - if timeslices is not None: - projections = convert_timeslice(projections, TIMESLICE, QuantityType.INTENSIVE) + projections = convert_timeslice(projections, TIMESLICE, QuantityType.INTENSIVE) # Base year export is optional. If it is not there, it's set to zero if isinstance(base_year_export, (str, Path)): @@ -662,13 +660,12 @@ def read_initial_market( getLogger(__name__).info("Base year import not provided. 
Set to zero.") base_year_import = xr.zeros_like(projections) - if timeslices is not None: - base_year_export = convert_timeslice( - base_year_export, TIMESLICE, QuantityType.EXTENSIVE - ) - base_year_import = convert_timeslice( - base_year_import, TIMESLICE, QuantityType.EXTENSIVE - ) + base_year_export = convert_timeslice( + base_year_export, TIMESLICE, QuantityType.EXTENSIVE + ) + base_year_import = convert_timeslice( + base_year_import, TIMESLICE, QuantityType.EXTENSIVE + ) base_year_export.name = "exports" base_year_import.name = "imports" @@ -688,7 +685,7 @@ def read_initial_market( commodity_price="prices", units_commodity_price="units_prices" ) result["prices"] = ( - result["prices"].expand_dims({"timeslice": timeslices}).drop_vars("timeslice") + result["prices"].expand_dims({"timeslice": TIMESLICE}).drop_vars("timeslice") ) return result From 65c3e489002712be6e28b8f51c3183cfe4069adb Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 9 Oct 2024 12:11:42 +0100 Subject: [PATCH 18/92] Draft new function with intended behaviour --- src/muse/timeslices.py | 48 +++++++++++++++++++++++++----------------- 1 file changed, 29 insertions(+), 19 deletions(-) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 001152bfe..10eeb92bc 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -247,8 +247,6 @@ def setup_module(settings: Union[str, Mapping]): def timeslice_projector( x: Union[DataArray, MultiIndex], - finest: Optional[DataArray] = None, - transforms: Optional[dict[tuple, ndarray]] = None, ) -> DataArray: '''Project time-slice to standardized finest time-slices. 
@@ -347,12 +345,8 @@ def timeslice_projector( from numpy import concatenate, ones_like from xarray import DataArray - if finest is None: - global TIMESLICE - finest = TIMESLICE - if transforms is None: - global TRANSFORMS - transforms = TRANSFORMS + finest = TIMESLICE + transforms = TRANSFORMS index = finest.get_index("timeslice") index = index.set_names(f"finest_{u}" for u in index.names) @@ -396,12 +390,27 @@ class QuantityType(Enum): EXTENSIVE = "extensive" +def convert_timeslice_new(x, ts, quantity): + if hasattr(x, "timeslice"): + return x + + if hasattr(ts, "timeslice"): + ts = ts.timeslice + + extensive = x.expand_dims(timeslice=ts["timeslice"]).assign_coords( + timeslice=ts.indexes["timeslice"] + ) + if quantity is QuantityType.EXTENSIVE: + return extensive + + if quantity is QuantityType.INTENSIVE: + return extensive * (ts / ts.sum()) + + def convert_timeslice( x: Union[DataArray, Dataset], ts: Union[DataArray, Dataset, MultiIndex], quantity: Union[QuantityType, str] = QuantityType.EXTENSIVE, - finest: Optional[DataArray] = None, - transforms: Optional[dict[tuple, ndarray]] = None, ) -> Union[DataArray, Dataset]: '''Adjusts the timeslice of x to match that of ts. 
@@ -528,21 +537,22 @@ def convert_timeslice( >>> bool(all((weekend * 5).round(6) == (weekdays * 2).round(6))) True ''' - if finest is None: - global TIMESLICE - finest = TIMESLICE - if transforms is None: - global TRANSFORMS - transforms = TRANSFORMS + finest = TIMESLICE + + if hasattr(x, "timeslice"): + return x + if hasattr(ts, "timeslice"): ts = ts.timeslice + has_ts = "timeslice" in getattr(x, "dims", ()) same_ts = has_ts and len(ts) == len(x.timeslice) and x.timeslice.equals(ts) if same_ts or ((not has_ts) and quantity == QuantityType.INTENSIVE): return x - quantity = QuantityType(quantity) - proj0 = timeslice_projector(x, finest=finest, transforms=transforms) - proj1 = timeslice_projector(ts, finest=finest, transforms=transforms) + + proj0 = timeslice_projector(x) + proj1 = timeslice_projector(ts) + if quantity is QuantityType.EXTENSIVE: finest = finest.rename(timeslice="finest_timeslice") index = finest.get_index("finest_timeslice") From d9eb060dd05c1dbce64062b30932d826491322a4 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 10 Oct 2024 14:01:00 +0100 Subject: [PATCH 19/92] Use new function wherever possible --- src/muse/constraints.py | 10 ++++----- src/muse/costs.py | 26 +++++++++++------------ src/muse/demand_share.py | 18 ++++++++-------- src/muse/examples.py | 6 +++--- src/muse/investments.py | 4 ++-- src/muse/objectives.py | 16 +++++++------- src/muse/outputs/mca.py | 35 ++++++++++++++++++------------- src/muse/quantities.py | 18 ++++++++-------- src/muse/readers/csv.py | 15 ++++++++----- src/muse/sectors/preset_sector.py | 6 +++--- src/muse/sectors/sector.py | 10 ++++----- src/muse/timeslices.py | 13 ++++-------- 12 files changed, 91 insertions(+), 86 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 551b472cc..e5255b241 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -728,7 +728,7 @@ def minimum_service( from xarray import ones_like, zeros_like from muse.commodities import is_enduse - from 
muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new if "minimum_service_factor" not in technologies.data_vars: return None @@ -751,10 +751,10 @@ def minimum_service( .sel(**kwargs) .drop_vars("technology") ) - capacity = convert_timeslice( + capacity = convert_timeslice_new( techs.fixed_outputs * techs.minimum_service_factor, TIMESLICE, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) if "asset" not in capacity.dims: capacity = capacity.expand_dims(asset=search_space.asset) @@ -817,11 +817,11 @@ def lp_costs(technologies: xr.Dataset, costs: xr.DataArray) -> xr.Dataset: from xarray import zeros_like from muse.commodities import is_enduse - from muse.timeslices import TIMESLICE, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new assert "year" not in technologies.dims - ts_costs = convert_timeslice(costs, TIMESLICE) + ts_costs = convert_timeslice_new(costs, TIMESLICE, QuantityType.INTENSIVE) selection = dict( commodity=is_enduse(technologies.comm_usage), technology=technologies.technology.isin(costs.replacement), diff --git a/src/muse/costs.py b/src/muse/costs.py index ca0c7e213..d0710c310 100644 --- a/src/muse/costs.py +++ b/src/muse/costs.py @@ -13,7 +13,7 @@ from muse.commodities import is_enduse, is_fuel, is_material, is_pollutant from muse.quantities import consumption -from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice +from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new from muse.utilities import filter_input @@ -96,10 +96,10 @@ def net_present_value( raw_revenues = (production * prices_non_env * rates).sum(("commodity", "year")) # Cost of installed capacity - installed_capacity_costs = convert_timeslice( + installed_capacity_costs = convert_timeslice_new( techs.cap_par * (capacity**techs.cap_exp), TIMESLICE, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) # Cost related to 
environmental products @@ -120,10 +120,10 @@ def net_present_value( material_costs = (production * prices_material * rates).sum(("commodity", "year")) # Fixed and Variable costs - fixed_costs = convert_timeslice( + fixed_costs = convert_timeslice_new( techs.fix_par * (capacity**techs.fix_exp), TIMESLICE, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) variable_costs = techs.var_par * ( (production.sel(commodity=products).sum("commodity")) ** techs.var_exp @@ -260,10 +260,10 @@ def lifetime_levelized_cost_of_energy( fuels = is_fuel(technologies.comm_usage) # Cost of installed capacity - installed_capacity_costs = convert_timeslice( + installed_capacity_costs = convert_timeslice_new( techs.cap_par * (capacity**techs.cap_exp), TIMESLICE, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) # Cost related to environmental products @@ -284,10 +284,10 @@ def lifetime_levelized_cost_of_energy( material_costs = (production * prices_material * rates).sum(("commodity", "year")) # Fixed and Variable costs - fixed_costs = convert_timeslice( + fixed_costs = convert_timeslice_new( techs.fix_par * (capacity**techs.fix_exp), TIMESLICE, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) variable_costs = ( techs.var_par * production.sel(commodity=products) ** techs.var_exp @@ -372,19 +372,19 @@ def annual_levelized_cost_of_energy( rates = techs.interest_rate / (1 - (1 + techs.interest_rate) ** (-life)) annualized_capital_costs = ( - convert_timeslice( + convert_timeslice_new( techs.cap_par * rates, TIMESLICE, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) / techs.utilization_factor ) o_and_e_costs = ( - convert_timeslice( + convert_timeslice_new( (techs.fix_par + techs.var_par), TIMESLICE, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) / techs.utilization_factor ) diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index bc848b627..1212c1763 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -145,7 +145,7 @@ def 
new_and_retro( A_{a, s}^r = w_s\sum_i A_a^{r, i} with :math:`w_s` a weight associated with each timeslice and determined via - :py:func:`muse.timeslices.convert_timeslice`. + :py:func:`muse.timeslices.convert_timeslice_new`. #. An intermediate quantity, the :py:func:`unmet demand ` :math:`U` is defined from @@ -433,7 +433,7 @@ def unmet_forecasted_demand( ) -> xr.DataArray: """Forecast demand that cannot be serviced by non-decommissioned current assets.""" from muse.commodities import is_enduse - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new from muse.utilities import reduce_assets year = current_year + forecast @@ -442,7 +442,7 @@ def unmet_forecasted_demand( capacity = reduce_assets([u.assets.capacity.interp(year=year) for u in agents]) ts_capacity = cast( xr.DataArray, - convert_timeslice(capacity, TIMESLICE, QuantityType.EXTENSIVE), + convert_timeslice_new(capacity, TIMESLICE, QuantityType.INTENSIVE), ) result = unmet_demand(smarket, ts_capacity, technologies, production) @@ -565,7 +565,7 @@ def new_consumption( """ from numpy import minimum - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new # Interpolate market to forecast year market = market.interp(year=[current_year, current_year + forecast]) @@ -576,10 +576,10 @@ def new_consumption( delta = (forecasted.consumption - current.consumption).clip(min=0) # Capacity in the forecast year - ts_capa = convert_timeslice( + ts_capa = convert_timeslice_new( capacity.interp(year=current_year + forecast), TIMESLICE, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) assert isinstance(ts_capa, xr.DataArray) @@ -610,7 +610,7 @@ def new_and_retro_demands( from numpy import minimum from muse.production import factory as prod_factory - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from 
muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new production_method = production if callable(production) else prod_factory(production) assert callable(production_method) @@ -619,10 +619,10 @@ def new_and_retro_demands( smarket: xr.Dataset = market.interp(year=[current_year, current_year + forecast]) # Split capacity between timeslices - ts_capa = convert_timeslice( + ts_capa = convert_timeslice_new( capacity.interp(year=[current_year, current_year + forecast]), TIMESLICE, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) assert isinstance(ts_capa, xr.DataArray) diff --git a/src/muse/examples.py b/src/muse/examples.py index ac592ce8e..8cd220de6 100644 --- a/src/muse/examples.py +++ b/src/muse/examples.py @@ -241,7 +241,7 @@ def matching_market(sector: str, model: str = "default") -> xr.Dataset: from muse.examples import sector as load_sector from muse.quantities import consumption, maximum_production from muse.sectors import Sector - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new from muse.utilities import agent_concatenation loaded_sector = cast(Sector, load_sector(sector, model)) @@ -250,10 +250,10 @@ def matching_market(sector: str, model: str = "default") -> xr.Dataset: market = xr.Dataset() production = cast( xr.DataArray, - convert_timeslice( + convert_timeslice_new( maximum_production(loaded_sector.technologies, assets.capacity), TIMESLICE, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ), ) market["supply"] = production.sum("asset") diff --git a/src/muse/investments.py b/src/muse/investments.py index f98efbeda..d23e10c1a 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -247,7 +247,7 @@ def adhoc_match_demand( ) -> xr.DataArray: from muse.demand_matching import demand_matching from muse.quantities import capacity_in_use, maximum_production - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice 
+ from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new demand = next(c for c in constraints if c.name == "demand").b @@ -260,7 +260,7 @@ def adhoc_match_demand( commodity=demand.commodity, ).drop_vars("technology") if "timeslice" in demand.dims and "timeslice" not in max_prod.dims: - max_prod = convert_timeslice(max_prod, TIMESLICE, QuantityType.EXTENSIVE) + max_prod = convert_timeslice_new(max_prod, TIMESLICE, QuantityType.INTENSIVE) # Push disabled techs to last rank. # Any production assigned to them by the demand-matching algorithm will be removed. diff --git a/src/muse/objectives.py b/src/muse/objectives.py index ef0b88691..653e68249 100644 --- a/src/muse/objectives.py +++ b/src/muse/objectives.py @@ -388,11 +388,11 @@ def lifetime_levelized_cost_of_energy( due to a zero utilisation factor. """ from muse.costs import lifetime_levelized_cost_of_energy as LCOE - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new capacity = capacity_to_service_demand(technologies, demand) production = capacity * technologies.fixed_outputs * technologies.utilization_factor - production = convert_timeslice(production, TIMESLICE, QuantityType.EXTENSIVE) + production = convert_timeslice_new(production, TIMESLICE, QuantityType.INTENSIVE) results = LCOE( technologies=technologies, @@ -418,11 +418,11 @@ def net_present_value( See :py:func:`muse.costs.net_present_value` for more details. 
""" from muse.costs import net_present_value as NPV - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new capacity = capacity_to_service_demand(technologies, demand) production = capacity * technologies.fixed_outputs * technologies.utilization_factor - production = convert_timeslice(production, TIMESLICE, QuantityType.EXTENSIVE) + production = convert_timeslice_new(production, TIMESLICE, QuantityType.INTENSIVE) results = NPV( technologies=technologies, @@ -447,11 +447,11 @@ def net_present_cost( See :py:func:`muse.costs.net_present_cost` for more details. """ from muse.costs import net_present_cost as NPC - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new capacity = capacity_to_service_demand(technologies, demand) production = capacity * technologies.fixed_outputs * technologies.utilization_factor - production = convert_timeslice(production, TIMESLICE, QuantityType.EXTENSIVE) + production = convert_timeslice_new(production, TIMESLICE, QuantityType.INTENSIVE) results = NPC( technologies=technologies, @@ -476,11 +476,11 @@ def equivalent_annual_cost( See :py:func:`muse.costs.equivalent_annual_cost` for more details. 
""" from muse.costs import equivalent_annual_cost as EAC - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new capacity = capacity_to_service_demand(technologies, demand) production = capacity * technologies.fixed_outputs * technologies.utilization_factor - production = convert_timeslice(production, TIMESLICE, QuantityType.EXTENSIVE) + production = convert_timeslice_new(production, TIMESLICE, QuantityType.INTENSIVE) results = EAC( technologies=technologies, diff --git a/src/muse/outputs/mca.py b/src/muse/outputs/mca.py index 3fe34aae2..12f38fea1 100644 --- a/src/muse/outputs/mca.py +++ b/src/muse/outputs/mca.py @@ -35,7 +35,12 @@ def quantity( from muse.outputs.sector import market_quantity from muse.registration import registrator from muse.sectors import AbstractSector -from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice, drop_timeslice +from muse.timeslices import ( + TIMESLICE, + QuantityType, + convert_timeslice_new, + drop_timeslice, +) from muse.utilities import multiindex_to_coords OUTPUT_QUANTITY_SIGNATURE = Callable[ @@ -349,14 +354,14 @@ def sector_supply(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.Da ] agent_market.loc[dict(commodity=excluded)] = 0 - result = convert_timeslice( + result = convert_timeslice_new( supply( agent_market, TIMESLICE, technologies, ), agent_market["consumption"].timeslice, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) if "year" in result.dims: @@ -580,14 +585,14 @@ def sector_consumption( ] agent_market.loc[dict(commodity=excluded)] = 0 - production = convert_timeslice( + production = convert_timeslice_new( supply( agent_market, TIMESLICE, technologies, ), agent_market["consumption"].timeslice, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) prices = a.filter_input(market.prices, year=output_year) result = consumption( @@ -717,14 +722,14 @@ def sector_fuel_costs( ) commodity = 
is_fuel(technologies.comm_usage) - production = convert_timeslice( + production = convert_timeslice_new( supply( agent_market, TIMESLICE, technologies, ), agent_market["consumption"].timeslice, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) prices = a.filter_input(market.prices, year=output_year) @@ -776,10 +781,10 @@ def sector_capital_costs( technology=capacity.technology, ) result = data.cap_par * (capacity**data.cap_exp) - data_agent = convert_timeslice( + data_agent = convert_timeslice_new( result, TIMESLICE, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) data_agent["agent"] = a.name data_agent["category"] = a.category @@ -837,14 +842,14 @@ def sector_emission_costs( i = (np.where(envs))[0][0] red_envs = envs[i].commodity.values prices = a.filter_input(market.prices, year=output_year, commodity=red_envs) - production = convert_timeslice( + production = convert_timeslice_new( supply( agent_market, TIMESLICE, technologies, ), agent_market["consumption"].timeslice, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) total = production.sel(commodity=enduses).sum("commodity") data_agent = total * (allemissions * prices).sum("commodity") @@ -911,10 +916,10 @@ def sector_lcoe(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.Data demand = agent_market.consumption.sel(commodity=included) capacity = agent.filter_input(capacity_to_service_demand(demand, techs)) production = capacity * techs.fixed_outputs * techs.utilization_factor - production = convert_timeslice( + production = convert_timeslice_new( production, TIMESLICE, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) result = LCOE( @@ -989,10 +994,10 @@ def sector_eac(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.DataF demand = agent_market.consumption.sel(commodity=included) capacity = agent.filter_input(capacity_to_service_demand(demand, techs)) production = capacity * techs.fixed_outputs * techs.utilization_factor - production = convert_timeslice( + production = 
convert_timeslice_new( production, TIMESLICE, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) result = EAC( diff --git a/src/muse/quantities.py b/src/muse/quantities.py index 561656468..4c58fefb5 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -150,7 +150,7 @@ def gross_margin( - non-environmental commodities OUTPUTS are related to revenues. """ from muse.commodities import is_enduse, is_pollutant - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new from muse.utilities import broadcast_techs tech = broadcast_techs( # type: ignore @@ -187,10 +187,10 @@ def gross_margin( enduses = is_enduse(technologies.comm_usage) # Variable costs depend on factors such as labour - variable_costs = convert_timeslice( + variable_costs = convert_timeslice_new( var_par * ((fixed_outputs.sel(commodity=enduses)).sum("commodity")) ** var_exp, TIMESLICE, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) # The individual prices are selected @@ -269,7 +269,7 @@ def consumption( are not given, then flexible consumption is *not* considered. 
""" from muse.commodities import is_enduse, is_fuel - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new from muse.utilities import filter_with_template params = filter_with_template( @@ -283,8 +283,8 @@ def consumption( production = production.sel(commodity=is_enduse(comm_usage)).sum("commodity") if prices is not None and "timeslice" in prices.dims: - production = convert_timeslice( # type: ignore - production, TIMESLICE, QuantityType.EXTENSIVE + production = convert_timeslice_new( # type: ignore + production, TIMESLICE, QuantityType.INTENSIVE ) params_fuels = is_fuel(params.comm_usage) @@ -380,7 +380,7 @@ def demand_matched_production( """ from muse.costs import annual_levelized_cost_of_energy as ALCOE from muse.demand_matching import demand_matching - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new from muse.utilities import broadcast_techs technodata = cast(xr.Dataset, broadcast_techs(technologies, capacity)) @@ -388,8 +388,8 @@ def demand_matched_production( max_production = maximum_production(technodata, capacity, **filters) assert ("timeslice" in demand.dims) == ("timeslice" in cost.dims) if "timeslice" in demand.dims and "timeslice" not in max_production.dims: - max_production = convert_timeslice( - max_production, TIMESLICE, QuantityType.EXTENSIVE + max_production = convert_timeslice_new( + max_production, TIMESLICE, QuantityType.INTENSIVE ) return demand_matching(demand, cost, max_production) diff --git a/src/muse/readers/csv.py b/src/muse/readers/csv.py index 7e9ac44da..755d1f0ce 100644 --- a/src/muse/readers/csv.py +++ b/src/muse/readers/csv.py @@ -636,7 +636,12 @@ def read_initial_market( """Read projections, import and export csv files.""" from logging import getLogger - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from 
muse.timeslices import ( + TIMESLICE, + QuantityType, + convert_timeslice, + convert_timeslice_new, + ) # Projections must always be present if isinstance(projections, (str, Path)): @@ -660,11 +665,11 @@ def read_initial_market( getLogger(__name__).info("Base year import not provided. Set to zero.") base_year_import = xr.zeros_like(projections) - base_year_export = convert_timeslice( - base_year_export, TIMESLICE, QuantityType.EXTENSIVE + base_year_export = convert_timeslice_new( + base_year_export, TIMESLICE, QuantityType.INTENSIVE ) - base_year_import = convert_timeslice( - base_year_import, TIMESLICE, QuantityType.EXTENSIVE + base_year_import = convert_timeslice_new( + base_year_import, TIMESLICE, QuantityType.INTENSIVE ) base_year_export.name = "exports" base_year_import.name = "imports" diff --git a/src/muse/sectors/preset_sector.py b/src/muse/sectors/preset_sector.py index 8539527c4..12c9da18c 100644 --- a/src/muse/sectors/preset_sector.py +++ b/src/muse/sectors/preset_sector.py @@ -31,7 +31,7 @@ def factory(cls, name: str, settings: Any) -> PresetSector: read_timeslices, ) from muse.regressions import endogenous_demand - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new sector_conf = getattr(settings.sectors, name) presets = Dataset() @@ -121,8 +121,8 @@ def factory(cls, name: str, settings: Any) -> PresetSector: # add timeslice, if missing for component in {"supply", "consumption"}: if "timeslice" not in presets[component].dims: - presets[component] = convert_timeslice( - presets[component], TIMESLICE, QuantityType.EXTENSIVE + presets[component] = convert_timeslice_new( + presets[component], TIMESLICE, QuantityType.INTENSIVE ) comm_usage = (presets.costs > 0).any(set(presets.costs.dims) - {"commodity"}) diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index 0872240fa..f52410b33 100644 --- a/src/muse/sectors/sector.py +++ 
b/src/muse/sectors/sector.py @@ -285,7 +285,7 @@ def market_variables(self, market: xr.Dataset, technologies: xr.Dataset) -> Any: from muse.commodities import is_pollutant from muse.costs import annual_levelized_cost_of_energy, supply_cost from muse.quantities import consumption - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new from muse.utilities import broadcast_techs years = market.year.values @@ -296,7 +296,7 @@ def market_variables(self, market: xr.Dataset, technologies: xr.Dataset) -> Any: market=market, capacity=capacity, technologies=technologies ) if "timeslice" in market.prices.dims and "timeslice" not in supply.dims: - supply = convert_timeslice(supply, TIMESLICE, QuantityType.EXTENSIVE) + supply = convert_timeslice_new(supply, TIMESLICE, QuantityType.INTENSIVE) # Calculate consumption consume = consumption(technologies, supply, market.prices) @@ -391,7 +391,7 @@ def convert_market_timeslice( intensive: str | tuple[str] = "prices", ) -> xr.Dataset: """Converts market from one to another timeslice.""" - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new if isinstance(intensive, str): intensive = (intensive,) @@ -400,10 +400,10 @@ def convert_market_timeslice( intensives = market[list(timesliced.intersection(intensive))] if "timeslice" not in intensives.dims: - intensives = convert_timeslice( + intensives = convert_timeslice_new( intensives, TIMESLICE, - QuantityType.INTENSIVE, + QuantityType.EXTENSIVE, ) extensives = market[list(timesliced.difference(intensives.data_vars))] others = market[list(set(market.data_vars).difference(timesliced))] diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 10eeb92bc..f4bf4d6b6 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -391,15 +391,13 @@ class QuantityType(Enum): def 
convert_timeslice_new(x, ts, quantity): + from xarray import Coordinates + if hasattr(x, "timeslice"): return x - if hasattr(ts, "timeslice"): - ts = ts.timeslice - - extensive = x.expand_dims(timeslice=ts["timeslice"]).assign_coords( - timeslice=ts.indexes["timeslice"] - ) + mindex_coords = Coordinates.from_pandas_multiindex(ts.timeslice, "timeslice") + extensive = x.expand_dims(timeslice=ts["timeslice"]).assign_coords(mindex_coords) if quantity is QuantityType.EXTENSIVE: return extensive @@ -539,9 +537,6 @@ def convert_timeslice( ''' finest = TIMESLICE - if hasattr(x, "timeslice"): - return x - if hasattr(ts, "timeslice"): ts = ts.timeslice From 7ebab9ee3f1c8a58b67beb1f8aae812c001ec3ec Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 10 Oct 2024 16:08:53 +0100 Subject: [PATCH 20/92] Update tests --- src/muse/readers/csv.py | 2 -- tests/test_constraints.py | 22 ++++++++-------- tests/test_costs.py | 6 ++--- tests/test_demand_share.py | 12 ++++----- tests/test_quantities.py | 36 +++++++++++++------------- tests/test_timeslices.py | 52 -------------------------------------- 6 files changed, 38 insertions(+), 92 deletions(-) diff --git a/src/muse/readers/csv.py b/src/muse/readers/csv.py index 755d1f0ce..917936b0f 100644 --- a/src/muse/readers/csv.py +++ b/src/muse/readers/csv.py @@ -639,7 +639,6 @@ def read_initial_market( from muse.timeslices import ( TIMESLICE, QuantityType, - convert_timeslice, convert_timeslice_new, ) @@ -647,7 +646,6 @@ def read_initial_market( if isinstance(projections, (str, Path)): getLogger(__name__).info(f"Reading projections from {projections}") projections = read_attribute_table(projections) - projections = convert_timeslice(projections, TIMESLICE, QuantityType.INTENSIVE) # Base year export is optional. 
If it is not there, it's set to zero if isinstance(base_year_export, (str, Path)): diff --git a/tests/test_constraints.py b/tests/test_constraints.py index 0f307f321..e816c240e 100644 --- a/tests/test_constraints.py +++ b/tests/test_constraints.py @@ -61,7 +61,6 @@ def lpcosts(technologies, market, costs): return lp_costs( technologies.interp(year=market.year.min() + 5).drop_vars("year"), costs=costs, - timeslices=market.timeslice, ) @@ -73,13 +72,14 @@ def assets(residential): @fixture def market_demand(assets, technologies, market): from muse.quantities import maximum_production - from muse.timeslices import convert_timeslice + from muse.timeslices import QuantityType, convert_timeslice_new return 0.8 * maximum_production( technologies.interp(year=2025), - convert_timeslice( + convert_timeslice_new( assets.capacity.sel(year=2025).groupby("technology").sum("asset"), market, + QuantityType.INTENSIVE, ), ).rename(technology="asset") @@ -227,7 +227,7 @@ def test_to_scipy_adapter_maxprod(technologies, costs, max_production, timeslice assert adapter.b_ub.size == adapter.A_ub.shape[0] assert adapter.c.size == adapter.A_ub.shape[1] - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) capsize = lpcosts.capacity.size prodsize = lpcosts.production.size assert adapter.c.size == capsize + prodsize @@ -254,7 +254,7 @@ def test_to_scipy_adapter_demand(technologies, costs, demand_constraint, timesli assert adapter.b_ub.size == adapter.A_ub.shape[0] assert adapter.c.size == adapter.A_ub.shape[1] - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) capsize = lpcosts.capacity.size prodsize = lpcosts.production.size assert adapter.c.size == capsize + prodsize @@ -290,7 +290,7 @@ def test_to_scipy_adapter_max_capacity_expansion( assert adapter.c.size == adapter.A_ub.shape[1] assert adapter.c.ndim == 1 - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) 
capsize = lpcosts.capacity.size prodsize = lpcosts.production.size assert adapter.c.size == capsize + prodsize @@ -314,7 +314,7 @@ def test_to_scipy_adapter_no_constraint(technologies, costs, timeslices): assert adapter.b_eq is None assert adapter.c.ndim == 1 - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) capsize = lpcosts.capacity.size prodsize = lpcosts.production.size assert adapter.c.size == capsize + prodsize @@ -325,7 +325,7 @@ def test_back_to_muse_capacity(technologies, costs, timeslices): technologies = technologies.interp(year=2025) - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) data = ScipyAdapter._unified_dataset(technologies, lpcosts) lpquantity = ScipyAdapter._selected_quantity(data, "capacity") assert set(lpquantity.dims) == {"d(asset)", "d(replacement)"} @@ -340,7 +340,7 @@ def test_back_to_muse_production(technologies, costs, timeslices): technologies = technologies.interp(year=2025) - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) data = ScipyAdapter._unified_dataset(technologies, lpcosts) lpquantity = ScipyAdapter._selected_quantity(data, "production") assert set(lpquantity.dims) == { @@ -359,7 +359,7 @@ def test_back_to_muse_all(technologies, costs, timeslices, rng: np.random.Genera from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) data = ScipyAdapter._unified_dataset(technologies, lpcosts) lpcapacity = ScipyAdapter._selected_quantity(data, "capacity") @@ -390,7 +390,7 @@ def test_scipy_adapter_back_to_muse(technologies, costs, timeslices, rng): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) data = 
ScipyAdapter._unified_dataset(technologies, lpcosts) lpcapacity = ScipyAdapter._selected_quantity(data, "capacity") diff --git a/tests/test_costs.py b/tests/test_costs.py index 4270d5b2a..1bbefe0b1 100644 --- a/tests/test_costs.py +++ b/tests/test_costs.py @@ -19,13 +19,13 @@ def _capacity(technologies, demand_share): @fixture def _production(technologies, _capacity, demand_share): - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import QuantityType, convert_timeslice_new production = ( _capacity * technologies.fixed_outputs * technologies.utilization_factor ) - production = convert_timeslice( - production, demand_share.timeslice, QuantityType.EXTENSIVE + production = convert_timeslice_new( + production, demand_share.timeslice, QuantityType.INTENSIVE ) return production diff --git a/tests/test_demand_share.py b/tests/test_demand_share.py index 9661a180b..a8c282687 100644 --- a/tests/test_demand_share.py +++ b/tests/test_demand_share.py @@ -19,13 +19,13 @@ def _matching_market(technologies, stock, timeslice): from numpy.random import random from muse.quantities import consumption, maximum_production - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import QuantityType, convert_timeslice_new market = xr.Dataset() - production = convert_timeslice( + production = convert_timeslice_new( maximum_production(technologies, stock.capacity), timeslice, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) market["supply"] = production.sum("asset") market["consumption"] = drop_timeslice( @@ -126,7 +126,7 @@ def test_new_retro_split_zero_new_unmet(technologies, stock, matching_market): def test_new_retro_accounting_identity(technologies, stock, market): from muse.demand_share import new_and_retro_demands from muse.production import factory - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import QuantityType, convert_timeslice_new share = new_and_retro_demands( 
stock.capacity, market, technologies, current_year=2010, forecast=5 @@ -134,14 +134,14 @@ def test_new_retro_accounting_identity(technologies, stock, market): assert (share >= 0).all() production_method = factory() - serviced = convert_timeslice( + serviced = convert_timeslice_new( production_method( market.interp(year=2015), stock.capacity.interp(year=2015), technologies ) .groupby("region") .sum("asset"), market.timeslice, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) consumption = market.consumption.interp(year=2015) diff --git a/tests/test_quantities.py b/tests/test_quantities.py index 2d0711ea5..280209833 100644 --- a/tests/test_quantities.py +++ b/tests/test_quantities.py @@ -403,7 +403,7 @@ def test_demand_matched_production( ): from muse.commodities import CommodityUsage, is_enduse from muse.quantities import demand_matched_production, maximum_production - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import QuantityType, convert_timeslice_new # try and make sure we have a few more outputs than the default fixture technologies.comm_usage[:] = np.random.choice( @@ -414,10 +414,10 @@ def test_demand_matched_production( technologies.fixed_outputs[:] *= is_enduse(technologies.comm_usage) capacity = capacity.sel(year=capacity.year.min(), drop=True) - max_prod = convert_timeslice( + max_prod = convert_timeslice_new( maximum_production(technologies, capacity), demand.timeslice, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) demand = max_prod.sum("asset") demand[:] *= np.random.choice([0, 1, 1 / 2, 1 / 3, 1 / 10], demand.shape) @@ -434,7 +434,7 @@ def test_costed_production_exact_match(market, capacity, technologies): costed_production, maximum_production, ) - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import QuantityType, convert_timeslice_new from muse.utilities import broadcast_techs if set(capacity.region.values) != set(market.region.values): @@ -445,13 +445,13 @@ def 
test_costed_production_exact_match(market, capacity, technologies): costs = annual_levelized_cost_of_energy( prices=market.prices.sel(region=technodata.region), technologies=technodata ) - maxdemand = convert_timeslice( + maxdemand = convert_timeslice_new( xr.Dataset(dict(mp=maximum_production(technologies, capacity))) .groupby("region") .sum("asset") .mp, market, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) market["consumption"] = drop_timeslice(maxdemand) result = costed_production(market.consumption, costs, capacity, technologies) @@ -469,16 +469,16 @@ def test_costed_production_single_region(market, capacity, technologies): costed_production, maximum_production, ) - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import QuantityType, convert_timeslice_new from muse.utilities import broadcast_techs capacity = capacity.drop_vars("region") capacity["region"] = "USA" market = market.sel(region=[capacity.region.values]) - maxdemand = convert_timeslice( + maxdemand = convert_timeslice_new( maximum_production(technologies, capacity).sum("asset"), market, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) market["consumption"] = drop_timeslice(0.9 * maxdemand) technodata = broadcast_techs(technologies, capacity) @@ -500,18 +500,18 @@ def test_costed_production_single_year(market, capacity, technologies): costed_production, maximum_production, ) - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import QuantityType, convert_timeslice_new from muse.utilities import broadcast_techs capacity = capacity.sel(year=2010) market = market.sel(year=2010) - maxdemand = convert_timeslice( + maxdemand = convert_timeslice_new( xr.Dataset(dict(mp=maximum_production(technologies, capacity))) .groupby("region") .sum("asset") .mp, market, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) market["consumption"] = drop_timeslice(0.9 * maxdemand) technodata = broadcast_techs(technologies, capacity) @@ -533,7 
+533,7 @@ def test_costed_production_over_capacity(market, capacity, technologies): costed_production, maximum_production, ) - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import QuantityType, convert_timeslice_new from muse.utilities import broadcast_techs capacity = capacity.isel(asset=[0, 1, 2]) @@ -541,13 +541,13 @@ def test_costed_production_over_capacity(market, capacity, technologies): capacity.region.values[: len(set(market.region.values))] = list( set(market.region.values) ) - maxdemand = convert_timeslice( + maxdemand = convert_timeslice_new( xr.Dataset(dict(mp=maximum_production(technologies, capacity))) .groupby("region") .sum("asset") .mp, market, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) market["consumption"] = drop_timeslice(maxdemand * 0.9) technodata = broadcast_techs(technologies, capacity) @@ -569,7 +569,7 @@ def test_costed_production_with_minimum_service(market, capacity, technologies, costed_production, maximum_production, ) - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import QuantityType, convert_timeslice_new from muse.utilities import broadcast_techs if set(capacity.region.values) != set(market.region.values): @@ -580,8 +580,8 @@ def test_costed_production_with_minimum_service(market, capacity, technologies, technologies.utilization_factor.dims, rng.uniform(low=0.5, high=0.9, size=technologies.utilization_factor.shape), ) - maxprod = convert_timeslice( - maximum_production(technologies, capacity), market, QuantityType.EXTENSIVE + maxprod = convert_timeslice_new( + maximum_production(technologies, capacity), market, QuantityType.INTENSIVE ) minprod = maxprod * broadcast_techs(technologies.minimum_service_factor, maxprod) maxdemand = xr.Dataset(dict(mp=minprod)).groupby("region").sum("asset").mp diff --git a/tests/test_timeslices.py b/tests/test_timeslices.py index 7f478d027..b4fcd1b6c 100644 --- a/tests/test_timeslices.py +++ b/tests/test_timeslices.py 
@@ -3,8 +3,6 @@ from pytest import approx, fixture from xarray import DataArray -from muse.timeslices import QuantityType, convert_timeslice - @fixture def toml(): @@ -60,56 +58,6 @@ def timeslice_dataarray(reference): ) -def test_convert_extensive_timeslice(reference, timeslice_dataarray, transforms): - z = convert_timeslice( - timeslice_dataarray, reference, finest=reference, transforms=transforms - ) - assert z.shape == reference.shape - assert z.values == approx( - [ - float( - timeslice_dataarray[0] * reference[0] / (reference[0] + reference[1]) - ), - float( - timeslice_dataarray[0] * reference[1] / (reference[0] + reference[1]) - ), - 0, - 0, - float(timeslice_dataarray[1]), - 0, - 0, - 0, - float(timeslice_dataarray[2]), - 0, - ] - ) - - -def test_convert_intensive_timeslice(reference, timeslice_dataarray, transforms): - z = convert_timeslice( - timeslice_dataarray, - reference, - finest=reference, - transforms=transforms, - quantity=QuantityType.INTENSIVE, - ) - - assert z.values == approx( - [ - float(timeslice_dataarray[0]), - float(timeslice_dataarray[0]), - 0, - 0, - float(timeslice_dataarray[1]), - 0, - 0, - 0, - float(timeslice_dataarray[2]), - 0, - ] - ) - - def test_reference_timeslice(): from toml import loads From a0fe43c3d4b605e43c7d8aced953d6659a8dc00d Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 10 Oct 2024 16:18:05 +0100 Subject: [PATCH 21/92] Remove represent_hours function --- src/muse/objectives.py | 13 ++++--------- src/muse/quantities.py | 7 ++----- src/muse/timeslices.py | 14 -------------- 3 files changed, 6 insertions(+), 28 deletions(-) diff --git a/src/muse/objectives.py b/src/muse/objectives.py index 653e68249..284504cc9 100644 --- a/src/muse/objectives.py +++ b/src/muse/objectives.py @@ -214,12 +214,8 @@ def capacity_to_service_demand( ) -> xr.DataArray: """Minimum capacity required to fulfill the demand.""" from muse.quantities import capacity_to_service_demand - from muse.timeslices import represent_hours - hours = 
represent_hours(demand.timeslice) - return capacity_to_service_demand( - demand=demand, technologies=technologies, hours=hours - ) + return capacity_to_service_demand(demand=demand, technologies=technologies) @register_objective @@ -230,13 +226,12 @@ def capacity_in_use( **kwargs, ): from muse.commodities import is_enduse - from muse.timeslices import represent_hours + from muse.timeslices import TIMESLICE - hours = represent_hours(demand.timeslice) enduses = is_enduse(technologies.comm_usage.sel(commodity=demand.commodity)) return ( - (demand.sel(commodity=enduses).sum("commodity") / hours).sum("timeslice") - * hours.sum() + (demand.sel(commodity=enduses).sum("commodity") / TIMESLICE).sum("timeslice") + * TIMESLICE.sum() / technologies.utilization_factor ) diff --git a/src/muse/quantities.py b/src/muse/quantities.py index 4c58fefb5..0773b9405 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -529,14 +529,11 @@ def group_assets(x: xr.DataArray) -> xr.DataArray: def capacity_to_service_demand( demand: xr.DataArray, technologies: xr.Dataset, - hours=None, ) -> xr.DataArray: """Minimum capacity required to fulfill the demand.""" - from muse.timeslices import represent_hours + from muse.timeslices import TIMESLICE - if hours is None: - hours = represent_hours(demand.timeslice) - max_hours = hours.max() / hours.sum() + max_hours = TIMESLICE.max() / TIMESLICE.sum() commodity_output = technologies.fixed_outputs.sel(commodity=demand.commodity) max_demand = ( demand.where(commodity_output > 0, 0) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index f4bf4d6b6..cf27f463c 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -6,7 +6,6 @@ "convert_timeslice", "timeslice_projector", "setup_module", - "represent_hours", ] from collections.abc import Mapping, Sequence @@ -588,19 +587,6 @@ def new_to_old_timeslice(ts: DataArray, ag_level="Month") -> dict: return converted_ts -def represent_hours( - timeslices: DataArray, nhours: 
Union[int, float] = 8765.82 -) -> DataArray: - """Number of hours per timeslice. - - Arguments: - timeslices: The timeslice for which to compute the number of hours - nhours: The total number of hours represented in the timeslice. Defaults to the - average number of hours in year. - """ - return convert_timeslice(DataArray([nhours]), timeslices).squeeze() - - def drop_timeslice(data: DataArray) -> DataArray: """Drop the timeslice variable from a DataArray. From c2b94e7a4782a3b1770fc75c82a24bc91bb17064 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 11 Oct 2024 10:08:46 +0100 Subject: [PATCH 22/92] Fix issue with timeslice ordering --- src/muse/constraints.py | 10 +++++++--- src/muse/timeslices.py | 1 + 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index e5255b241..8ae44a40b 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -446,7 +446,11 @@ def max_production( from xarray import ones_like, zeros_like from muse.commodities import is_enduse - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import ( + TIMESLICE, + QuantityType, + convert_timeslice_new, + ) if year is None: year = int(market.year.min()) @@ -465,10 +469,10 @@ def max_production( .sel(**kwargs) .drop_vars("technology") ) - capacity = convert_timeslice( + capacity = convert_timeslice_new( techs.fixed_outputs * techs.utilization_factor, TIMESLICE, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) if "asset" not in capacity.dims and "asset" in search_space.dims: capacity = capacity.expand_dims(asset=search_space.asset) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index cf27f463c..1caf8b2b7 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -393,6 +393,7 @@ def convert_timeslice_new(x, ts, quantity): from xarray import Coordinates if hasattr(x, "timeslice"): + x = x.sel(timeslice=ts["timeslice"]) return x mindex_coords = 
Coordinates.from_pandas_multiindex(ts.timeslice, "timeslice") From 5cbc8f2afd2248961611511f6ea4e101a816c2f0 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 11 Oct 2024 10:53:56 +0100 Subject: [PATCH 23/92] Remove remaining convert_timeslice calls --- src/muse/investments.py | 4 ++-- src/muse/quantities.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/muse/investments.py b/src/muse/investments.py index d23e10c1a..97b23b9f6 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -126,9 +126,9 @@ def factory(settings: Optional[Union[str, Mapping]] = None) -> Callable: if top.lower() == "max": def timeslice_op(x: xr.DataArray) -> xr.DataArray: - from muse.timeslices import convert_timeslice + from muse.timeslices import TIMESLICE - return (x / convert_timeslice(xr.DataArray(1), x)).max("timeslice") + return (x / (TIMESLICE / sum(TIMESLICE))).max("timeslice") elif top.lower() == "sum": diff --git a/src/muse/quantities.py b/src/muse/quantities.py index 0773b9405..310c47d99 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -459,7 +459,7 @@ def costed_production( service is applied first. 
""" from muse.quantities import maximum_production - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new from muse.utilities import broadcast_techs technodata = cast(xr.Dataset, broadcast_techs(technologies, capacity)) @@ -475,10 +475,10 @@ def group_assets(x: xr.DataArray) -> xr.DataArray: return xr.Dataset(dict(x=x)).groupby("region").sum("asset").x ranking = costs.rank("asset") - maxprod = convert_timeslice( + maxprod = convert_timeslice_new( maximum_production(technodata, capacity), TIMESLICE, - QuantityType.EXTENSIVE, + QuantityType.INTENSIVE, ) commodity = (maxprod > 0).any([i for i in maxprod.dims if i != "commodity"]) commodity = commodity.drop_vars( From 81e7a6acbd18f6a84f5212dc78da1619367d6289 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 11 Oct 2024 11:02:55 +0100 Subject: [PATCH 24/92] Simplify timeslice_op function --- src/muse/investments.py | 40 +++++++++++----------------------------- 1 file changed, 11 insertions(+), 29 deletions(-) diff --git a/src/muse/investments.py b/src/muse/investments.py index 97b23b9f6..bc731c906 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -121,25 +121,6 @@ def factory(settings: Optional[Union[str, Mapping]] = None) -> Callable: name = settings["name"] params = {k: v for k, v in settings.items() if k != "name"} - top = params.get("timeslice_op", "max") - if isinstance(top, str): - if top.lower() == "max": - - def timeslice_op(x: xr.DataArray) -> xr.DataArray: - from muse.timeslices import TIMESLICE - - return (x / (TIMESLICE / sum(TIMESLICE))).max("timeslice") - - elif top.lower() == "sum": - - def timeslice_op(x: xr.DataArray) -> xr.DataArray: - return x.sum("timeslice") - - else: - raise ValueError(f"Unknown timeslice transform {top}") - - params["timeslice_op"] = timeslice_op - investment = INVESTMENTS[name] def compute_investment( @@ -243,7 +224,6 @@ def adhoc_match_demand( technologies: 
xr.Dataset, constraints: list[Constraint], year: int, - timeslice_op: Optional[Callable[[xr.DataArray], xr.DataArray]] = None, ) -> xr.DataArray: from muse.demand_matching import demand_matching from muse.quantities import capacity_in_use, maximum_production @@ -265,7 +245,7 @@ def adhoc_match_demand( # Push disabled techs to last rank. # Any production assigned to them by the demand-matching algorithm will be removed. - if "timeslice" in costs.dims and timeslice_op is not None: + if "timeslice" in costs.dims: costs = costs.mean("timeslice").mean("asset") # timeslice_op(costs) minobj = costs.min() @@ -282,7 +262,7 @@ def adhoc_match_demand( capacity = capacity_in_use( production, technologies, year=year, technology=production.replacement ).drop_vars("technology") - if "timeslice" in capacity.dims and timeslice_op is not None: + if "timeslice" in capacity.dims: capacity = timeslice_op(capacity) result = xr.Dataset({"capacity": capacity, "production": production}) @@ -296,7 +276,6 @@ def scipy_match_demand( technologies: xr.Dataset, constraints: list[Constraint], year: Optional[int] = None, - timeslice_op: Optional[Callable[[xr.DataArray], xr.DataArray]] = None, **options, ) -> xr.DataArray: from logging import getLogger @@ -305,7 +284,7 @@ def scipy_match_demand( from muse.constraints import ScipyAdapter - if "timeslice" in costs.dims and timeslice_op is not None: + if "timeslice" in costs.dims: costs = timeslice_op(costs) # Select technodata for the current year @@ -354,7 +333,6 @@ def cvxopt_match_demand( technologies: xr.Dataset, constraints: list[Constraint], year: Optional[int] = None, - timeslice_op: Optional[Callable[[xr.DataArray], xr.DataArray]] = None, **options, ) -> xr.DataArray: from importlib import import_module @@ -370,9 +348,7 @@ def cvxopt_match_demand( techs = technologies def default_to_scipy(): - return scipy_match_demand( - costs, search_space, techs, constraints, timeslice_op=timeslice_op - ) + return scipy_match_demand(costs, search_space, 
techs, constraints) try: cvxopt = import_module("cvxopt") @@ -385,7 +361,7 @@ def default_to_scipy(): getLogger(__name__).critical(msg) return default_to_scipy() - if "timeslice" in costs.dims and timeslice_op is not None: + if "timeslice" in costs.dims: costs = timeslice_op(costs) timeslice = next(cs.timeslice for cs in constraints if "timeslice" in cs.dims) adapter = ScipyAdapter.factory( @@ -412,3 +388,9 @@ def default_to_scipy(): solution = cast(Callable[[np.ndarray], xr.Dataset], adapter.to_muse)(list(res["x"])) return solution + + +def timeslice_op(x: xr.DataArray) -> xr.DataArray: + from muse.timeslices import TIMESLICE + + return (x / (TIMESLICE / sum(TIMESLICE))).max("timeslice") From 19cf269ecbb9201bf790ba13c8fde781e4cb9a07 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 11 Oct 2024 11:07:20 +0100 Subject: [PATCH 25/92] Delete old convert_timeslice function --- src/muse/constraints.py | 12 +-- src/muse/costs.py | 14 +-- src/muse/demand_share.py | 12 +-- src/muse/examples.py | 4 +- src/muse/investments.py | 4 +- src/muse/objectives.py | 16 +-- src/muse/outputs/mca.py | 16 +-- src/muse/quantities.py | 16 +-- src/muse/readers/csv.py | 6 +- src/muse/sectors/preset_sector.py | 4 +- src/muse/sectors/sector.py | 8 +- src/muse/timeslices.py | 170 +----------------------------- tests/test_constraints.py | 4 +- tests/test_costs.py | 4 +- tests/test_demand_share.py | 8 +- tests/test_quantities.py | 24 ++--- 16 files changed, 78 insertions(+), 244 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 8ae44a40b..3849bb2b1 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -449,7 +449,7 @@ def max_production( from muse.timeslices import ( TIMESLICE, QuantityType, - convert_timeslice_new, + convert_timeslice, ) if year is None: @@ -469,7 +469,7 @@ def max_production( .sel(**kwargs) .drop_vars("technology") ) - capacity = convert_timeslice_new( + capacity = convert_timeslice( techs.fixed_outputs * 
techs.utilization_factor, TIMESLICE, QuantityType.INTENSIVE, @@ -732,7 +732,7 @@ def minimum_service( from xarray import ones_like, zeros_like from muse.commodities import is_enduse - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice if "minimum_service_factor" not in technologies.data_vars: return None @@ -755,7 +755,7 @@ def minimum_service( .sel(**kwargs) .drop_vars("technology") ) - capacity = convert_timeslice_new( + capacity = convert_timeslice( techs.fixed_outputs * techs.minimum_service_factor, TIMESLICE, QuantityType.INTENSIVE, @@ -821,11 +821,11 @@ def lp_costs(technologies: xr.Dataset, costs: xr.DataArray) -> xr.Dataset: from xarray import zeros_like from muse.commodities import is_enduse - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice assert "year" not in technologies.dims - ts_costs = convert_timeslice_new(costs, TIMESLICE, QuantityType.INTENSIVE) + ts_costs = convert_timeslice(costs, TIMESLICE, QuantityType.INTENSIVE) selection = dict( commodity=is_enduse(technologies.comm_usage), technology=technologies.technology.isin(costs.replacement), diff --git a/src/muse/costs.py b/src/muse/costs.py index d0710c310..64fc1b979 100644 --- a/src/muse/costs.py +++ b/src/muse/costs.py @@ -13,7 +13,7 @@ from muse.commodities import is_enduse, is_fuel, is_material, is_pollutant from muse.quantities import consumption -from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new +from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice from muse.utilities import filter_input @@ -96,7 +96,7 @@ def net_present_value( raw_revenues = (production * prices_non_env * rates).sum(("commodity", "year")) # Cost of installed capacity - installed_capacity_costs = convert_timeslice_new( + installed_capacity_costs = convert_timeslice( techs.cap_par * 
(capacity**techs.cap_exp), TIMESLICE, QuantityType.INTENSIVE, @@ -120,7 +120,7 @@ def net_present_value( material_costs = (production * prices_material * rates).sum(("commodity", "year")) # Fixed and Variable costs - fixed_costs = convert_timeslice_new( + fixed_costs = convert_timeslice( techs.fix_par * (capacity**techs.fix_exp), TIMESLICE, QuantityType.INTENSIVE, @@ -260,7 +260,7 @@ def lifetime_levelized_cost_of_energy( fuels = is_fuel(technologies.comm_usage) # Cost of installed capacity - installed_capacity_costs = convert_timeslice_new( + installed_capacity_costs = convert_timeslice( techs.cap_par * (capacity**techs.cap_exp), TIMESLICE, QuantityType.INTENSIVE, @@ -284,7 +284,7 @@ def lifetime_levelized_cost_of_energy( material_costs = (production * prices_material * rates).sum(("commodity", "year")) # Fixed and Variable costs - fixed_costs = convert_timeslice_new( + fixed_costs = convert_timeslice( techs.fix_par * (capacity**techs.fix_exp), TIMESLICE, QuantityType.INTENSIVE, @@ -372,7 +372,7 @@ def annual_levelized_cost_of_energy( rates = techs.interest_rate / (1 - (1 + techs.interest_rate) ** (-life)) annualized_capital_costs = ( - convert_timeslice_new( + convert_timeslice( techs.cap_par * rates, TIMESLICE, QuantityType.INTENSIVE, @@ -381,7 +381,7 @@ def annual_levelized_cost_of_energy( ) o_and_e_costs = ( - convert_timeslice_new( + convert_timeslice( (techs.fix_par + techs.var_par), TIMESLICE, QuantityType.INTENSIVE, diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index 1212c1763..fb2321bd9 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -433,7 +433,7 @@ def unmet_forecasted_demand( ) -> xr.DataArray: """Forecast demand that cannot be serviced by non-decommissioned current assets.""" from muse.commodities import is_enduse - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice from muse.utilities import reduce_assets year = 
current_year + forecast @@ -442,7 +442,7 @@ def unmet_forecasted_demand( capacity = reduce_assets([u.assets.capacity.interp(year=year) for u in agents]) ts_capacity = cast( xr.DataArray, - convert_timeslice_new(capacity, TIMESLICE, QuantityType.INTENSIVE), + convert_timeslice(capacity, TIMESLICE, QuantityType.INTENSIVE), ) result = unmet_demand(smarket, ts_capacity, technologies, production) @@ -565,7 +565,7 @@ def new_consumption( """ from numpy import minimum - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice # Interpolate market to forecast year market = market.interp(year=[current_year, current_year + forecast]) @@ -576,7 +576,7 @@ def new_consumption( delta = (forecasted.consumption - current.consumption).clip(min=0) # Capacity in the forecast year - ts_capa = convert_timeslice_new( + ts_capa = convert_timeslice( capacity.interp(year=current_year + forecast), TIMESLICE, QuantityType.INTENSIVE, @@ -610,7 +610,7 @@ def new_and_retro_demands( from numpy import minimum from muse.production import factory as prod_factory - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice production_method = production if callable(production) else prod_factory(production) assert callable(production_method) @@ -619,7 +619,7 @@ def new_and_retro_demands( smarket: xr.Dataset = market.interp(year=[current_year, current_year + forecast]) # Split capacity between timeslices - ts_capa = convert_timeslice_new( + ts_capa = convert_timeslice( capacity.interp(year=[current_year, current_year + forecast]), TIMESLICE, QuantityType.INTENSIVE, diff --git a/src/muse/examples.py b/src/muse/examples.py index 8cd220de6..390a7be96 100644 --- a/src/muse/examples.py +++ b/src/muse/examples.py @@ -241,7 +241,7 @@ def matching_market(sector: str, model: str = "default") -> xr.Dataset: from muse.examples import 
sector as load_sector from muse.quantities import consumption, maximum_production from muse.sectors import Sector - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice from muse.utilities import agent_concatenation loaded_sector = cast(Sector, load_sector(sector, model)) @@ -250,7 +250,7 @@ def matching_market(sector: str, model: str = "default") -> xr.Dataset: market = xr.Dataset() production = cast( xr.DataArray, - convert_timeslice_new( + convert_timeslice( maximum_production(loaded_sector.technologies, assets.capacity), TIMESLICE, QuantityType.INTENSIVE, diff --git a/src/muse/investments.py b/src/muse/investments.py index bc731c906..923100a70 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -227,7 +227,7 @@ def adhoc_match_demand( ) -> xr.DataArray: from muse.demand_matching import demand_matching from muse.quantities import capacity_in_use, maximum_production - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice demand = next(c for c in constraints if c.name == "demand").b @@ -240,7 +240,7 @@ def adhoc_match_demand( commodity=demand.commodity, ).drop_vars("technology") if "timeslice" in demand.dims and "timeslice" not in max_prod.dims: - max_prod = convert_timeslice_new(max_prod, TIMESLICE, QuantityType.INTENSIVE) + max_prod = convert_timeslice(max_prod, TIMESLICE, QuantityType.INTENSIVE) # Push disabled techs to last rank. # Any production assigned to them by the demand-matching algorithm will be removed. diff --git a/src/muse/objectives.py b/src/muse/objectives.py index 284504cc9..0a9164135 100644 --- a/src/muse/objectives.py +++ b/src/muse/objectives.py @@ -383,11 +383,11 @@ def lifetime_levelized_cost_of_energy( due to a zero utilisation factor. 
""" from muse.costs import lifetime_levelized_cost_of_energy as LCOE - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice capacity = capacity_to_service_demand(technologies, demand) production = capacity * technologies.fixed_outputs * technologies.utilization_factor - production = convert_timeslice_new(production, TIMESLICE, QuantityType.INTENSIVE) + production = convert_timeslice(production, TIMESLICE, QuantityType.INTENSIVE) results = LCOE( technologies=technologies, @@ -413,11 +413,11 @@ def net_present_value( See :py:func:`muse.costs.net_present_value` for more details. """ from muse.costs import net_present_value as NPV - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice capacity = capacity_to_service_demand(technologies, demand) production = capacity * technologies.fixed_outputs * technologies.utilization_factor - production = convert_timeslice_new(production, TIMESLICE, QuantityType.INTENSIVE) + production = convert_timeslice(production, TIMESLICE, QuantityType.INTENSIVE) results = NPV( technologies=technologies, @@ -442,11 +442,11 @@ def net_present_cost( See :py:func:`muse.costs.net_present_cost` for more details. 
""" from muse.costs import net_present_cost as NPC - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice capacity = capacity_to_service_demand(technologies, demand) production = capacity * technologies.fixed_outputs * technologies.utilization_factor - production = convert_timeslice_new(production, TIMESLICE, QuantityType.INTENSIVE) + production = convert_timeslice(production, TIMESLICE, QuantityType.INTENSIVE) results = NPC( technologies=technologies, @@ -471,11 +471,11 @@ def equivalent_annual_cost( See :py:func:`muse.costs.equivalent_annual_cost` for more details. """ from muse.costs import equivalent_annual_cost as EAC - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice capacity = capacity_to_service_demand(technologies, demand) production = capacity * technologies.fixed_outputs * technologies.utilization_factor - production = convert_timeslice_new(production, TIMESLICE, QuantityType.INTENSIVE) + production = convert_timeslice(production, TIMESLICE, QuantityType.INTENSIVE) results = EAC( technologies=technologies, diff --git a/src/muse/outputs/mca.py b/src/muse/outputs/mca.py index 12f38fea1..8a6d7f7fe 100644 --- a/src/muse/outputs/mca.py +++ b/src/muse/outputs/mca.py @@ -38,7 +38,7 @@ def quantity( from muse.timeslices import ( TIMESLICE, QuantityType, - convert_timeslice_new, + convert_timeslice, drop_timeslice, ) from muse.utilities import multiindex_to_coords @@ -354,7 +354,7 @@ def sector_supply(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.Da ] agent_market.loc[dict(commodity=excluded)] = 0 - result = convert_timeslice_new( + result = convert_timeslice( supply( agent_market, TIMESLICE, @@ -585,7 +585,7 @@ def sector_consumption( ] agent_market.loc[dict(commodity=excluded)] = 0 - production = convert_timeslice_new( + production = convert_timeslice( 
supply( agent_market, TIMESLICE, @@ -722,7 +722,7 @@ def sector_fuel_costs( ) commodity = is_fuel(technologies.comm_usage) - production = convert_timeslice_new( + production = convert_timeslice( supply( agent_market, TIMESLICE, @@ -781,7 +781,7 @@ def sector_capital_costs( technology=capacity.technology, ) result = data.cap_par * (capacity**data.cap_exp) - data_agent = convert_timeslice_new( + data_agent = convert_timeslice( result, TIMESLICE, QuantityType.INTENSIVE, @@ -842,7 +842,7 @@ def sector_emission_costs( i = (np.where(envs))[0][0] red_envs = envs[i].commodity.values prices = a.filter_input(market.prices, year=output_year, commodity=red_envs) - production = convert_timeslice_new( + production = convert_timeslice( supply( agent_market, TIMESLICE, @@ -916,7 +916,7 @@ def sector_lcoe(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.Data demand = agent_market.consumption.sel(commodity=included) capacity = agent.filter_input(capacity_to_service_demand(demand, techs)) production = capacity * techs.fixed_outputs * techs.utilization_factor - production = convert_timeslice_new( + production = convert_timeslice( production, TIMESLICE, QuantityType.INTENSIVE, @@ -994,7 +994,7 @@ def sector_eac(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.DataF demand = agent_market.consumption.sel(commodity=included) capacity = agent.filter_input(capacity_to_service_demand(demand, techs)) production = capacity * techs.fixed_outputs * techs.utilization_factor - production = convert_timeslice_new( + production = convert_timeslice( production, TIMESLICE, QuantityType.INTENSIVE, diff --git a/src/muse/quantities.py b/src/muse/quantities.py index 310c47d99..cb36a7734 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -150,7 +150,7 @@ def gross_margin( - non-environmental commodities OUTPUTS are related to revenues. 
""" from muse.commodities import is_enduse, is_pollutant - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice from muse.utilities import broadcast_techs tech = broadcast_techs( # type: ignore @@ -187,7 +187,7 @@ def gross_margin( enduses = is_enduse(technologies.comm_usage) # Variable costs depend on factors such as labour - variable_costs = convert_timeslice_new( + variable_costs = convert_timeslice( var_par * ((fixed_outputs.sel(commodity=enduses)).sum("commodity")) ** var_exp, TIMESLICE, QuantityType.INTENSIVE, @@ -269,7 +269,7 @@ def consumption( are not given, then flexible consumption is *not* considered. """ from muse.commodities import is_enduse, is_fuel - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice from muse.utilities import filter_with_template params = filter_with_template( @@ -283,7 +283,7 @@ def consumption( production = production.sel(commodity=is_enduse(comm_usage)).sum("commodity") if prices is not None and "timeslice" in prices.dims: - production = convert_timeslice_new( # type: ignore + production = convert_timeslice( # type: ignore production, TIMESLICE, QuantityType.INTENSIVE ) @@ -380,7 +380,7 @@ def demand_matched_production( """ from muse.costs import annual_levelized_cost_of_energy as ALCOE from muse.demand_matching import demand_matching - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice from muse.utilities import broadcast_techs technodata = cast(xr.Dataset, broadcast_techs(technologies, capacity)) @@ -388,7 +388,7 @@ def demand_matched_production( max_production = maximum_production(technodata, capacity, **filters) assert ("timeslice" in demand.dims) == ("timeslice" in cost.dims) if "timeslice" in demand.dims and "timeslice" not in 
max_production.dims: - max_production = convert_timeslice_new( + max_production = convert_timeslice( max_production, TIMESLICE, QuantityType.INTENSIVE ) return demand_matching(demand, cost, max_production) @@ -459,7 +459,7 @@ def costed_production( service is applied first. """ from muse.quantities import maximum_production - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice from muse.utilities import broadcast_techs technodata = cast(xr.Dataset, broadcast_techs(technologies, capacity)) @@ -475,7 +475,7 @@ def group_assets(x: xr.DataArray) -> xr.DataArray: return xr.Dataset(dict(x=x)).groupby("region").sum("asset").x ranking = costs.rank("asset") - maxprod = convert_timeslice_new( + maxprod = convert_timeslice( maximum_production(technodata, capacity), TIMESLICE, QuantityType.INTENSIVE, diff --git a/src/muse/readers/csv.py b/src/muse/readers/csv.py index 917936b0f..01c0d08c1 100644 --- a/src/muse/readers/csv.py +++ b/src/muse/readers/csv.py @@ -639,7 +639,7 @@ def read_initial_market( from muse.timeslices import ( TIMESLICE, QuantityType, - convert_timeslice_new, + convert_timeslice, ) # Projections must always be present @@ -663,10 +663,10 @@ def read_initial_market( getLogger(__name__).info("Base year import not provided. 
Set to zero.") base_year_import = xr.zeros_like(projections) - base_year_export = convert_timeslice_new( + base_year_export = convert_timeslice( base_year_export, TIMESLICE, QuantityType.INTENSIVE ) - base_year_import = convert_timeslice_new( + base_year_import = convert_timeslice( base_year_import, TIMESLICE, QuantityType.INTENSIVE ) base_year_export.name = "exports" diff --git a/src/muse/sectors/preset_sector.py b/src/muse/sectors/preset_sector.py index 12c9da18c..2a121fcfa 100644 --- a/src/muse/sectors/preset_sector.py +++ b/src/muse/sectors/preset_sector.py @@ -31,7 +31,7 @@ def factory(cls, name: str, settings: Any) -> PresetSector: read_timeslices, ) from muse.regressions import endogenous_demand - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice sector_conf = getattr(settings.sectors, name) presets = Dataset() @@ -121,7 +121,7 @@ def factory(cls, name: str, settings: Any) -> PresetSector: # add timeslice, if missing for component in {"supply", "consumption"}: if "timeslice" not in presets[component].dims: - presets[component] = convert_timeslice_new( + presets[component] = convert_timeslice( presets[component], TIMESLICE, QuantityType.INTENSIVE ) diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index f52410b33..37f029850 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -285,7 +285,7 @@ def market_variables(self, market: xr.Dataset, technologies: xr.Dataset) -> Any: from muse.commodities import is_pollutant from muse.costs import annual_levelized_cost_of_energy, supply_cost from muse.quantities import consumption - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice from muse.utilities import broadcast_techs years = market.year.values @@ -296,7 +296,7 @@ def market_variables(self, market: xr.Dataset, technologies: 
xr.Dataset) -> Any: market=market, capacity=capacity, technologies=technologies ) if "timeslice" in market.prices.dims and "timeslice" not in supply.dims: - supply = convert_timeslice_new(supply, TIMESLICE, QuantityType.INTENSIVE) + supply = convert_timeslice(supply, TIMESLICE, QuantityType.INTENSIVE) # Calculate consumption consume = consumption(technologies, supply, market.prices) @@ -391,7 +391,7 @@ def convert_market_timeslice( intensive: str | tuple[str] = "prices", ) -> xr.Dataset: """Converts market from one to another timeslice.""" - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice_new + from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice if isinstance(intensive, str): intensive = (intensive,) @@ -400,7 +400,7 @@ def convert_market_timeslice( intensives = market[list(timesliced.intersection(intensive))] if "timeslice" not in intensives.dims: - intensives = convert_timeslice_new( + intensives = convert_timeslice( intensives, TIMESLICE, QuantityType.EXTENSIVE, diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 1caf8b2b7..6a6cb26bd 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -12,10 +12,9 @@ from enum import Enum, unique from typing import Optional, Union -import xarray as xr from numpy import ndarray from pandas import MultiIndex -from xarray import DataArray, Dataset +from xarray import DataArray from muse.readers import kebab_to_camel @@ -389,7 +388,7 @@ class QuantityType(Enum): EXTENSIVE = "extensive" -def convert_timeslice_new(x, ts, quantity): +def convert_timeslice(x, ts, quantity): from xarray import Coordinates if hasattr(x, "timeslice"): @@ -405,171 +404,6 @@ def convert_timeslice_new(x, ts, quantity): return extensive * (ts / ts.sum()) -def convert_timeslice( - x: Union[DataArray, Dataset], - ts: Union[DataArray, Dataset, MultiIndex], - quantity: Union[QuantityType, str] = QuantityType.EXTENSIVE, -) -> Union[DataArray, Dataset]: - '''Adjusts the timeslice of x to match 
that of ts. - - The conversion can be done in on of two ways, depending on whether the - quantity is extensive or intensive. See `QuantityType`. - - Example: - Lets define three timeslices from finest, to fine, to rough: - - >>> toml = """ - ... ["timeslices"] - ... winter.weekday.day = 5 - ... winter.weekday.night = 5 - ... winter.weekend.day = 2 - ... winter.weekend.night = 2 - ... summer.weekday.day = 5 - ... summer.weekday.night = 5 - ... summer.weekend.day = 2 - ... summer.weekend.night = 2 - ... level_names = ["semester", "week", "day"] - ... aggregates.allday = ["day", "night"] - ... aggregates.allweek = ["weekend", "weekday"] - ... aggregates.allyear = ["winter", "summer"] - ... """ - >>> from muse.timeslices import setup_module - >>> from muse.readers import read_timeslices - >>> setup_module(toml) - >>> finest_ts = read_timeslices() - >>> fine_ts = read_timeslices(dict(week=["allweek"])) - >>> rough_ts = read_timeslices(dict(semester=["allyear"], day=["allday"])) - - Lets also define to other data-arrays to demonstrate how we can play with - dimensions: - - >>> from numpy import array - >>> x = DataArray( - ... [5, 2, 3], - ... coords={'a': array([1, 2, 3], dtype="int64")}, - ... dims='a' - ... ) - >>> y = DataArray([1, 1, 2], coords={'b': ["d", "e", "f"]}, dims='b') - - We can now easily convert arrays with different dimensions. 
First, lets check - conversion from an array with no timeslices: - - >>> from xarray import ones_like - >>> from muse.timeslices import convert_timeslice, QuantityType - >>> z = convert_timeslice(x, finest_ts, QuantityType.EXTENSIVE) - >>> z.round(6) - Size: 192B - array([[0.892857, 0.357143, 0.535714], - [0.892857, 0.357143, 0.535714], - [0.357143, 0.142857, 0.214286], - [0.357143, 0.142857, 0.214286], - [0.892857, 0.357143, 0.535714], - [0.892857, 0.357143, 0.535714], - [0.357143, 0.142857, 0.214286], - [0.357143, 0.142857, 0.214286]]) - Coordinates: - * timeslice (timeslice) object 64B MultiIndex - * semester (timeslice) object 64B 'winter' 'winter' ... 'summer' 'summer' - * week (timeslice) object 64B 'weekday' 'weekday' ... 'weekend' - * day (timeslice) object 64B 'day' 'night' 'day' ... 'day' 'night' - * a (a) int64 24B 1 2 3 - >>> z.sum("timeslice") - Size: 24B - array([5., 2., 3.]) - Coordinates: - * a (a) int64 24B 1 2 3 - - As expected, the sum over timeslices recovers the original array. - - In the case of an intensive quantity without a timeslice dimension, the - operation does not do anything: - - >>> convert_timeslice([1, 2], rough_ts, QuantityType.INTENSIVE) - [1, 2] - - More interesting is the conversion between different timeslices: - - >>> from xarray import zeros_like - >>> zfine = x + y + zeros_like(fine_ts.timeslice, dtype=int) - >>> zrough = convert_timeslice(zfine, rough_ts) - >>> zrough.round(6) - Size: 144B - array([[[17.142857, 17.142857, 20. ], - [ 8.571429, 8.571429, 11.428571], - [11.428571, 11.428571, 14.285714]], - - [[ 6.857143, 6.857143, 8. 
], - [ 3.428571, 3.428571, 4.571429], - [ 4.571429, 4.571429, 5.714286]]]) - Coordinates: - * timeslice (timeslice) object 16B MultiIndex - * semester (timeslice) object 16B 'allyear' 'allyear' - * week (timeslice) object 16B 'weekday' 'weekend' - * day (timeslice) object 16B 'allday' 'allday' - * a (a) int64 24B 1 2 3 - * b (b) >> from numpy import all - >>> all(zfine.sum("timeslice").round(6) == zrough.sum("timeslice").round(6)) - Size: 1B - array(True) - - Or that the ratio of weekdays to weekends makes sense: - >>> weekdays = ( - ... zrough - ... .unstack("timeslice") - ... .sel(week="weekday") - ... .stack(timeslice=["semester", "day"]) - ... .squeeze() - ... ) - >>> weekend = ( - ... zrough - ... .unstack("timeslice") - ... .sel(week="weekend") - ... .stack(timeslice=["semester", "day"]) - ... .squeeze() - ... ) - >>> bool(all((weekend * 5).round(6) == (weekdays * 2).round(6))) - True - ''' - finest = TIMESLICE - - if hasattr(ts, "timeslice"): - ts = ts.timeslice - - has_ts = "timeslice" in getattr(x, "dims", ()) - same_ts = has_ts and len(ts) == len(x.timeslice) and x.timeslice.equals(ts) - if same_ts or ((not has_ts) and quantity == QuantityType.INTENSIVE): - return x - - proj0 = timeslice_projector(x) - proj1 = timeslice_projector(ts) - - if quantity is QuantityType.EXTENSIVE: - finest = finest.rename(timeslice="finest_timeslice") - index = finest.get_index("finest_timeslice") - index = index.set_names(f"finest_{u}" for u in index.names) - mindex_coords = xr.Coordinates.from_pandas_multiindex(index, "finest_timeslice") - finest = finest.drop_vars(list(finest.coords)).assign_coords(mindex_coords) - proj0 = proj0 * finest - proj0 = proj0 / proj0.sum("finest_timeslice") - elif quantity is QuantityType.INTENSIVE: - proj1 = proj1 / proj1.sum("finest_timeslice") - - new_names = {"timeslice": "final_ts"} | { - c: f"{c}_ts" for c in proj1.timeslice.coords if c != "timeslice" - } - P = (proj1.rename(**new_names) * proj0).sum("finest_timeslice") - - final_names = 
{"final_ts": "timeslice"} | { - c: c.replace("_ts", "") for c in P.final_ts.coords if c != "final_ts" - } - return (P * x).sum("timeslice").rename(**final_names) - - def new_to_old_timeslice(ts: DataArray, ag_level="Month") -> dict: """Transforms timeslices defined as DataArray to a pandas dataframe. diff --git a/tests/test_constraints.py b/tests/test_constraints.py index e816c240e..0aee1af5c 100644 --- a/tests/test_constraints.py +++ b/tests/test_constraints.py @@ -72,11 +72,11 @@ def assets(residential): @fixture def market_demand(assets, technologies, market): from muse.quantities import maximum_production - from muse.timeslices import QuantityType, convert_timeslice_new + from muse.timeslices import QuantityType, convert_timeslice return 0.8 * maximum_production( technologies.interp(year=2025), - convert_timeslice_new( + convert_timeslice( assets.capacity.sel(year=2025).groupby("technology").sum("asset"), market, QuantityType.INTENSIVE, diff --git a/tests/test_costs.py b/tests/test_costs.py index 1bbefe0b1..6d90066ee 100644 --- a/tests/test_costs.py +++ b/tests/test_costs.py @@ -19,12 +19,12 @@ def _capacity(technologies, demand_share): @fixture def _production(technologies, _capacity, demand_share): - from muse.timeslices import QuantityType, convert_timeslice_new + from muse.timeslices import QuantityType, convert_timeslice production = ( _capacity * technologies.fixed_outputs * technologies.utilization_factor ) - production = convert_timeslice_new( + production = convert_timeslice( production, demand_share.timeslice, QuantityType.INTENSIVE ) return production diff --git a/tests/test_demand_share.py b/tests/test_demand_share.py index a8c282687..8e21b8cdf 100644 --- a/tests/test_demand_share.py +++ b/tests/test_demand_share.py @@ -19,10 +19,10 @@ def _matching_market(technologies, stock, timeslice): from numpy.random import random from muse.quantities import consumption, maximum_production - from muse.timeslices import QuantityType, convert_timeslice_new + 
from muse.timeslices import QuantityType, convert_timeslice market = xr.Dataset() - production = convert_timeslice_new( + production = convert_timeslice( maximum_production(technologies, stock.capacity), timeslice, QuantityType.INTENSIVE, @@ -126,7 +126,7 @@ def test_new_retro_split_zero_new_unmet(technologies, stock, matching_market): def test_new_retro_accounting_identity(technologies, stock, market): from muse.demand_share import new_and_retro_demands from muse.production import factory - from muse.timeslices import QuantityType, convert_timeslice_new + from muse.timeslices import QuantityType, convert_timeslice share = new_and_retro_demands( stock.capacity, market, technologies, current_year=2010, forecast=5 @@ -134,7 +134,7 @@ def test_new_retro_accounting_identity(technologies, stock, market): assert (share >= 0).all() production_method = factory() - serviced = convert_timeslice_new( + serviced = convert_timeslice( production_method( market.interp(year=2015), stock.capacity.interp(year=2015), technologies ) diff --git a/tests/test_quantities.py b/tests/test_quantities.py index 280209833..0479e1a1f 100644 --- a/tests/test_quantities.py +++ b/tests/test_quantities.py @@ -403,7 +403,7 @@ def test_demand_matched_production( ): from muse.commodities import CommodityUsage, is_enduse from muse.quantities import demand_matched_production, maximum_production - from muse.timeslices import QuantityType, convert_timeslice_new + from muse.timeslices import QuantityType, convert_timeslice # try and make sure we have a few more outputs than the default fixture technologies.comm_usage[:] = np.random.choice( @@ -414,7 +414,7 @@ def test_demand_matched_production( technologies.fixed_outputs[:] *= is_enduse(technologies.comm_usage) capacity = capacity.sel(year=capacity.year.min(), drop=True) - max_prod = convert_timeslice_new( + max_prod = convert_timeslice( maximum_production(technologies, capacity), demand.timeslice, QuantityType.INTENSIVE, @@ -434,7 +434,7 @@ def 
test_costed_production_exact_match(market, capacity, technologies): costed_production, maximum_production, ) - from muse.timeslices import QuantityType, convert_timeslice_new + from muse.timeslices import QuantityType, convert_timeslice from muse.utilities import broadcast_techs if set(capacity.region.values) != set(market.region.values): @@ -445,7 +445,7 @@ def test_costed_production_exact_match(market, capacity, technologies): costs = annual_levelized_cost_of_energy( prices=market.prices.sel(region=technodata.region), technologies=technodata ) - maxdemand = convert_timeslice_new( + maxdemand = convert_timeslice( xr.Dataset(dict(mp=maximum_production(technologies, capacity))) .groupby("region") .sum("asset") @@ -469,13 +469,13 @@ def test_costed_production_single_region(market, capacity, technologies): costed_production, maximum_production, ) - from muse.timeslices import QuantityType, convert_timeslice_new + from muse.timeslices import QuantityType, convert_timeslice from muse.utilities import broadcast_techs capacity = capacity.drop_vars("region") capacity["region"] = "USA" market = market.sel(region=[capacity.region.values]) - maxdemand = convert_timeslice_new( + maxdemand = convert_timeslice( maximum_production(technologies, capacity).sum("asset"), market, QuantityType.INTENSIVE, @@ -500,12 +500,12 @@ def test_costed_production_single_year(market, capacity, technologies): costed_production, maximum_production, ) - from muse.timeslices import QuantityType, convert_timeslice_new + from muse.timeslices import QuantityType, convert_timeslice from muse.utilities import broadcast_techs capacity = capacity.sel(year=2010) market = market.sel(year=2010) - maxdemand = convert_timeslice_new( + maxdemand = convert_timeslice( xr.Dataset(dict(mp=maximum_production(technologies, capacity))) .groupby("region") .sum("asset") @@ -533,7 +533,7 @@ def test_costed_production_over_capacity(market, capacity, technologies): costed_production, maximum_production, ) - from 
muse.timeslices import QuantityType, convert_timeslice_new + from muse.timeslices import QuantityType, convert_timeslice from muse.utilities import broadcast_techs capacity = capacity.isel(asset=[0, 1, 2]) @@ -541,7 +541,7 @@ def test_costed_production_over_capacity(market, capacity, technologies): capacity.region.values[: len(set(market.region.values))] = list( set(market.region.values) ) - maxdemand = convert_timeslice_new( + maxdemand = convert_timeslice( xr.Dataset(dict(mp=maximum_production(technologies, capacity))) .groupby("region") .sum("asset") @@ -569,7 +569,7 @@ def test_costed_production_with_minimum_service(market, capacity, technologies, costed_production, maximum_production, ) - from muse.timeslices import QuantityType, convert_timeslice_new + from muse.timeslices import QuantityType, convert_timeslice from muse.utilities import broadcast_techs if set(capacity.region.values) != set(market.region.values): @@ -580,7 +580,7 @@ def test_costed_production_with_minimum_service(market, capacity, technologies, technologies.utilization_factor.dims, rng.uniform(low=0.5, high=0.9, size=technologies.utilization_factor.shape), ) - maxprod = convert_timeslice_new( + maxprod = convert_timeslice( maximum_production(technologies, capacity), market, QuantityType.INTENSIVE ) minprod = maxprod * broadcast_techs(technologies.minimum_service_factor, maxprod) From 57c1c73806a7e282d12ea8b07a417a54243d2ad8 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 11 Oct 2024 11:31:16 +0100 Subject: [PATCH 26/92] Delete unused functions --- src/muse/readers/toml.py | 125 +---------------- src/muse/sectors/preset_sector.py | 4 +- src/muse/sectors/sector.py | 4 +- src/muse/timeslices.py | 223 +----------------------------- 4 files changed, 9 insertions(+), 347 deletions(-) diff --git a/src/muse/readers/toml.py b/src/muse/readers/toml.py index 5b357d636..9cc13697f 100644 --- a/src/muse/readers/toml.py +++ b/src/muse/readers/toml.py @@ -16,7 +16,6 @@ ) import numpy as np -import pandas 
as pd import xarray as xr from muse.decorators import SETTINGS_CHECKS, register_settings_check @@ -395,103 +394,7 @@ def read_settings( return convert(settings) -def read_ts_multiindex( - settings: Optional[Union[Mapping, str]] = None, - timeslice: Optional[xr.DataArray] = None, - transforms: Optional[dict[tuple, np.ndarray]] = None, -) -> pd.MultiIndex: - '''Read multiindex for a timeslice from TOML. - - Example: - The timeslices are read from ``timeslice_levels``. The levels (keyword) and - slice (list of values) correspond to the level, slices and slice aggregates - defined in the the ``timeslices`` section. - - >>> toml = """ - ... ["timeslices"] - ... winter.weekday.day = 5 - ... winter.weekday.night = 5 - ... winter.weekend.day = 2 - ... winter.weekend.night = 2 - ... winter.weekend.dusk = 1 - ... summer.weekday.day = 5 - ... summer.weekday.night = 5 - ... summer.weekend.day = 2 - ... summer.weekend.night = 2 - ... summer.weekend.dusk = 1 - ... level_names = ["semester", "week", "day"] - ... aggregates.allday = ["day", "night"] - ... [timeslice_levels] - ... day = ["dusk", "allday"] - ... """ - >>> from muse.timeslices import ( - ... reference_timeslice, aggregate_transforms - ... ) - >>> from muse.readers.toml import read_ts_multiindex - >>> ref = reference_timeslice(toml) - >>> transforms = aggregate_transforms(toml, ref) - >>> read_ts_multiindex(toml, ref, transforms) - MultiIndex([('summer', 'weekday', 'allday'), - ('summer', 'weekend', 'dusk'), - ('summer', 'weekend', 'allday'), - ('winter', 'weekday', 'allday'), - ('winter', 'weekend', 'dusk'), - ('winter', 'weekend', 'allday')], - names=['semester', 'week', 'day']) - - It is an error to refer to a level or a slice that does not exist: - - >>> read_ts_multiindex(dict(days=["dusk", "allday"]), ref, transforms) - Traceback (most recent call last): - ... - muse.readers.toml.IncorrectSettings: Unexpected level name(s): ... 
- >>> read_ts_multiindex(dict(day=["usk", "allday"]), ref, transforms) - Traceback (most recent call last): - ... - muse.readers.toml.IncorrectSettings: Unexpected slice(s): ... - ''' - from itertools import product - - from toml import loads - - from muse.timeslices import TIMESLICE, TRANSFORMS - - indices = (TIMESLICE if timeslice is None else timeslice).get_index("timeslice") - if transforms is None: - transforms = TRANSFORMS - if isinstance(settings, str): - settings = loads(settings) - elif settings is None: - return indices - elif not isinstance(settings, Mapping): - settings = undo_damage(settings) - settings = settings.get("timeslice_levels", settings) - assert isinstance(settings, Mapping) - if not set(settings).issubset(indices.names): - msg = "Unexpected level name(s): " + ", ".join( - set(settings).difference(indices.names) - ) - raise IncorrectSettings(msg) - levels = [ - settings.get(name, level) for name, level in zip(indices.names, indices.levels) - ] - levels = [[level] if isinstance(level, str) else level for level in levels] - for i, level in enumerate(levels): - known = [index[i] for index in transforms if len(index) > i] - unexpected = set(level).difference(known) - if unexpected: - raise IncorrectSettings("Unexpected slice(s): " + ", ".join(unexpected)) - return pd.MultiIndex.from_tuples( - [index for index in product(*levels) if index in transforms], - names=indices.names, - ) - - -def read_timeslices( - settings: Optional[Union[str, Mapping]] = None, - timeslice: Optional[xr.DataArray] = None, - transforms: Optional[dict[tuple, np.ndarray]] = None, -) -> xr.Dataset: +def read_timeslices() -> xr.Dataset: '''Reads timeslice levels and create resulting timeslice coordinate. 
Args: @@ -542,26 +445,10 @@ def read_timeslices( >>> assert set(ts.coords["week"].data) == {"weekday", "weekend"} >>> assert set(ts.coords["semester"].data) == {"summer", "winter"} ''' - from muse.timeslices import TIMESLICE, timeslice_projector - - if timeslice is None: - timeslice = TIMESLICE - if settings is None: - return xr.Dataset({"represent_hours": timeslice}).set_coords("represent_hours") - indices = read_ts_multiindex(settings, timeslice=timeslice, transforms=transforms) - units = xr.DataArray( - np.ones(len(indices)), coords={"timeslice": indices}, dims="timeslice" - ) - proj = timeslice_projector(units, finest=timeslice, transforms=transforms) - proj *= xr.DataArray( - timeslice.values, - coords={"finest_timeslice": proj.finest_timeslice}, - dims="finest_timeslice", - ) + from muse.timeslices import TIMESLICE - return xr.Dataset({"represent_hours": proj.sum("finest_timeslice")}).set_coords( - "represent_hours" - ) + timeslice = TIMESLICE + return xr.Dataset({"represent_hours": timeslice}).set_coords("represent_hours") def add_known_parameters(dd, u, parent=None): @@ -770,9 +657,7 @@ def check_time_slices(settings: dict) -> None: from muse.timeslices import setup_module setup_module(settings) - settings["timeslices"] = read_timeslices( - settings.get("mca", settings).get("timeslice_levels", None) - ).timeslice + settings["timeslices"] = read_timeslices().timeslice @register_settings_check(vary_name=False) diff --git a/src/muse/sectors/preset_sector.py b/src/muse/sectors/preset_sector.py index 2a121fcfa..64d79922c 100644 --- a/src/muse/sectors/preset_sector.py +++ b/src/muse/sectors/preset_sector.py @@ -36,9 +36,7 @@ def factory(cls, name: str, settings: Any) -> PresetSector: sector_conf = getattr(settings.sectors, name) presets = Dataset() - timeslice = read_timeslices( - getattr(sector_conf, "timeslice_levels", None) - ).timeslice + timeslice = read_timeslices().timeslice if getattr(sector_conf, "consumption_path", None) is not None: consumption = 
read_presets(sector_conf.consumption_path) presets["consumption"] = consumption.assign_coords(timeslice=timeslice) diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index 37f029850..be4c8bc5b 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -40,9 +40,7 @@ def factory(cls, name: str, settings: Any) -> Sector: raise RuntimeError(f"Empty 'subsectors' section in sector {name}") # Timeslices - timeslices = read_timeslices( - sector_settings.pop("timeslice_levels", None) - ).get_index("timeslice") + timeslices = read_timeslices().get_index("timeslice") # Read technologies technologies = read_technodata(settings, name, settings.time_framework) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 6a6cb26bd..15f28b2ca 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -2,15 +2,14 @@ __all__ = [ "reference_timeslice", - "aggregate_transforms", "convert_timeslice", - "timeslice_projector", + "drop_timeslice", "setup_module", ] from collections.abc import Mapping, Sequence from enum import Enum, unique -from typing import Optional, Union +from typing import Union from numpy import ndarray from pandas import MultiIndex @@ -144,228 +143,10 @@ def reference_timeslice( return DataArray(ts, coords={"timeslice": indices}, dims=name) -def aggregate_transforms( - settings: Optional[Union[Mapping, str]] = None, - timeslice: Optional[DataArray] = None, -) -> dict[tuple, ndarray]: - '''Creates dictionary of transforms for aggregate levels. - - The transforms are used to create the projectors towards the finest timeslice. - - Arguments: - timeslice: a ``DataArray`` with the timeslice dimension. - settings: A dictionary mapping the name of an aggregate with the values it - aggregates, or a string that toml will parse as such. If not given, only the - unit transforms are returned. - - Return: - A dictionary of transforms for each possible slice to it's corresponding finest - timeslices. 
- - Example: - >>> toml = """ - ... [timeslices] - ... spring.weekday = 5 - ... spring.weekend = 2 - ... autumn.weekday = 5 - ... autumn.weekend = 2 - ... winter.weekday = 5 - ... winter.weekend = 2 - ... summer.weekday = 5 - ... summer.weekend = 2 - ... - ... [timeslices.aggregates] - ... spautumn = ["spring", "autumn"] - ... week = ["weekday", "weekend"] - ... """ - >>> from muse.timeslices import reference_timeslice, aggregate_transforms - >>> ref = reference_timeslice(toml) - >>> transforms = aggregate_transforms(toml, ref) - >>> transforms[("spring", "weekend")] - array([0, 1, 0, 0, 0, 0, 0, 0]) - >>> transforms[("spautumn", "weekday")] - array([1, 0, 1, 0, 0, 0, 0, 0]) - >>> transforms[("autumn", "week")].T - array([0, 0, 1, 1, 0, 0, 0, 0]) - >>> transforms[("spautumn", "week")].T - array([1, 1, 1, 1, 0, 0, 0, 0]) - ''' - from itertools import product - - from numpy import identity, sum - from toml import loads - - if timeslice is None: - timeslice = TIMESLICE - if settings is None: - settings = {} - elif isinstance(settings, str): - settings = loads(settings) - - # get timeslice dimension - Id = identity(len(timeslice), dtype=int) - indices = timeslice.get_index("timeslice") - unitvecs: dict[tuple, ndarray] = {index: Id[i] for (i, index) in enumerate(indices)} - if "timeslices" in settings or "aggregates" in settings: - settings = settings.get("timeslices", settings).get("aggregates", {}) - assert isinstance(settings, Mapping) - - assert set(settings).intersection(unitvecs) == set() - levels = [list(level) for level in indices.levels] - for name, equivalent in settings.items(): - matching_levels = [ - set(level).issuperset(equivalent) for level in indices.levels - ] - if sum(matching_levels) == 0: - raise ValueError(f"Could not find matching level for {equivalent}") - elif sum(matching_levels) > 1: - raise ValueError(f"Found more than one matching level for {equivalent}") - level = matching_levels.index(True) - levels[level].append(name) - - result: 
dict[tuple, ndarray] = {} - for index in set(product(*levels)).difference(unitvecs): - if not any(level in settings for level in index): - continue - agglevels = set(product(*(settings.get(level, [level]) for level in index))) - result[index] = sum( - [unitvecs[agg] for agg in unitvecs if agg in agglevels], axis=0 - ) - result.update(unitvecs) - return result - - def setup_module(settings: Union[str, Mapping]): """Sets up module singletons.""" global TIMESLICE - global TRANSFORMS TIMESLICE = reference_timeslice(settings) - TRANSFORMS = aggregate_transforms(settings, TIMESLICE) - - -def timeslice_projector( - x: Union[DataArray, MultiIndex], -) -> DataArray: - '''Project time-slice to standardized finest time-slices. - - Returns a matrix from the input timeslice ``x`` to the ``finest`` timeslice, using - the input ``transforms``. The latter are a set of transforms that map indices from - one timeslice to indices in another. - - Example: - Lets define the following timeslices and aggregates: - - >>> toml = """ - ... ["timeslices"] - ... winter.weekday.day = 5 - ... winter.weekday.night = 5 - ... winter.weekend.day = 2 - ... winter.weekend.night = 2 - ... winter.weekend.dusk = 1 - ... summer.weekday.day = 5 - ... summer.weekday.night = 5 - ... summer.weekend.day = 2 - ... summer.weekend.night = 2 - ... summer.weekend.dusk = 1 - ... level_names = ["semester", "week", "day"] - ... aggregates.allday = ["day", "night"] - ... """ - >>> from muse.timeslices import ( - ... reference_timeslice, aggregate_transforms - ... ) - >>> ref = reference_timeslice(toml) - >>> transforms = aggregate_transforms(toml, ref) - >>> from pandas import MultiIndex - >>> input_ts = DataArray( - ... [1, 2, 3], - ... coords={ - ... "timeslice": MultiIndex.from_tuples( - ... [ - ... ("winter", "weekday", "allday"), - ... ("winter", "weekend", "dusk"), - ... ("summer", "weekend", "night"), - ... ], - ... names=ref.get_index("timeslice").names, - ... ), - ... }, - ... dims="timeslice" - ... 
) - >>> input_ts # doctest: +SKIP - Size: 12B - array([1, 2, 3]) - Coordinates: - * timeslice (timeslice) object 24B MultiIndex - * semester (timeslice) object 24B 'winter' 'winter' 'summer' - * week (timeslice) object 24B 'weekday' 'weekend' 'weekend' - * day (timeslice) object 24B 'allday' 'dusk' 'night' - - The input timeslice does not have to be complete. In any case, we can now - compute a transform, i.e. a matrix that will take this timeslice and transform - it to the equivalent times in the finest timeslice: - - >>> from muse.timeslices import timeslice_projector - >>> timeslice_projector(input_ts, ref, transforms) # doctest: +SKIP - Size: 120B - array([[1, 0, 0], - [1, 0, 0], - [0, 0, 0], - [0, 0, 0], - [0, 1, 0], - [0, 0, 0], - [0, 0, 0], - [0, 0, 0], - [0, 0, 1], - [0, 0, 0]]) - Coordinates: - * finest_timeslice (finest_timeslice) object 80B MultiIndex - * finest_semester (finest_timeslice) object 80B 'winter' ... 'summer' - * finest_week (finest_timeslice) object 80B 'weekday' ... 'weekend' - * finest_day (finest_timeslice) object 80B 'day' 'night' ... 'dusk' - * timeslice (timeslice) object 24B MultiIndex - * semester (timeslice) object 24B 'winter' 'winter' 'summer' - * week (timeslice) object 24B 'weekday' 'weekend' 'weekend' - * day (timeslice) object 24B 'allday' 'dusk' 'night' - - It is possible to give as input an array which does not have a timeslice of its - own: - - >>> nots = DataArray([5.0, 1.0, 2.0], dims="a", coords={'a': [1, 2, 3]}) - >>> timeslice_projector(nots, ref, transforms).T # doctest: +SKIP - Size: 40B - array([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]) - Coordinates: - * finest_timeslice (finest_timeslice) object 80B MultiIndex - * finest_semester (finest_timeslice) object 80B 'winter' ... 'summer' - * finest_week (finest_timeslice) object 80B 'weekday' ... 'weekend' - * finest_day (finest_timeslice) object 80B 'day' 'night' ... 
'dusk' - Dimensions without coordinates: timeslice - ''' - from numpy import concatenate, ones_like - from xarray import DataArray - - finest = TIMESLICE - transforms = TRANSFORMS - - index = finest.get_index("timeslice") - index = index.set_names(f"finest_{u}" for u in index.names) - - if isinstance(x, MultiIndex): - timeslices = x - elif "timeslice" in x.dims: - timeslices = x.get_index("timeslice") - else: - return DataArray( - ones_like(finest, dtype=int)[:, None], - coords={"finest_timeslice": index}, - dims=("finest_timeslice", "timeslice"), - ) - - return DataArray( - concatenate([transforms[index][:, None] for index in timeslices], axis=1), - coords={"finest_timeslice": index, "timeslice": timeslices}, - dims=("finest_timeslice", "timeslice"), - name="projector", - ) @unique From e4150e318b81a82c9752b3734f5eb270f720eab8 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 11 Oct 2024 12:00:11 +0100 Subject: [PATCH 27/92] Simplify timeslie import process --- src/muse/__init__.py | 1 - src/muse/readers/__init__.py | 2 +- src/muse/readers/csv.py | 32 ---------- src/muse/readers/toml.py | 75 ++-------------------- src/muse/sectors/legacy_sector.py | 4 +- src/muse/sectors/preset_sector.py | 3 +- src/muse/sectors/sector.py | 4 +- src/muse/timeslices.py | 18 ------ tests/test_timeslices.py | 100 +----------------------------- 9 files changed, 13 insertions(+), 226 deletions(-) diff --git a/src/muse/__init__.py b/src/muse/__init__.py index dff12e09a..35409e2a9 100644 --- a/src/muse/__init__.py +++ b/src/muse/__init__.py @@ -46,7 +46,6 @@ def _create_logger(color: bool = True): "read_technodictionary", "read_technologies", "read_timeslice_shares", - "read_csv_timeslices", "read_settings", "read_macro_drivers", "read_csv_agent_parameters", diff --git a/src/muse/readers/__init__.py b/src/muse/readers/__init__.py index 631cecdaf..0db43a479 100644 --- a/src/muse/readers/__init__.py +++ b/src/muse/readers/__init__.py @@ -2,7 +2,7 @@ from muse.defaults import 
DATA_DIRECTORY from muse.readers.csv import * # noqa: F403 -from muse.readers.toml import read_settings, read_timeslices # noqa: F401 +from muse.readers.toml import read_settings # noqa: F401 DEFAULT_SETTINGS_PATH = DATA_DIRECTORY / "default_settings.toml" """Default settings path.""" diff --git a/src/muse/readers/csv.py b/src/muse/readers/csv.py index 01c0d08c1..68f9084fb 100644 --- a/src/muse/readers/csv.py +++ b/src/muse/readers/csv.py @@ -5,7 +5,6 @@ "read_io_technodata", "read_initial_assets", "read_technologies", - "read_csv_timeslices", "read_global_commodities", "read_timeslice_shares", "read_csv_agent_parameters", @@ -414,35 +413,6 @@ def read_technologies( return result -def read_csv_timeslices(path: Union[str, Path], **kwargs) -> xr.DataArray: - """Reads timeslice information from input.""" - from logging import getLogger - - getLogger(__name__).info(f"Reading timeslices from {path}") - data = pd.read_csv(path, float_precision="high", **kwargs) - - def snake_case(string): - from re import sub - - result = sub(r"((?<=[a-z])[A-Z]|(? 
xr.Dataset: """Reads commodities information from input.""" from logging import getLogger @@ -500,8 +470,6 @@ def read_timeslice_shares( timeslice = timeslice.format(sector=sector) if isinstance(timeslice, (str, Path)) and not Path(timeslice).is_file(): timeslice = find_sectors_file(timeslice, sector, path) - if isinstance(timeslice, (str, Path)): - timeslice = read_csv_timeslices(timeslice, low_memory=False) share_path = find_sectors_file(f"TimesliceShare{sector}.csv", sector, path) getLogger(__name__).info(f"Reading timeslice shares from {share_path}") diff --git a/src/muse/readers/toml.py b/src/muse/readers/toml.py index 9cc13697f..55e97692c 100644 --- a/src/muse/readers/toml.py +++ b/src/muse/readers/toml.py @@ -359,6 +359,8 @@ def read_settings( Returns: A dictionary with the settings """ + from muse.timeslices import setup_module + getLogger(__name__).info("Reading MUSE settings") # The user data @@ -388,69 +390,16 @@ def read_settings( settings = add_known_parameters(default_settings, user_settings) settings = add_unknown_parameters(settings, user_settings) + # Set up timeslices + setup_module(settings) + settings.pop("timeslices", None) + # Finally, we run some checks to make sure all makes sense and files exist. validate_settings(settings) return convert(settings) -def read_timeslices() -> xr.Dataset: - '''Reads timeslice levels and create resulting timeslice coordinate. - - Args: - settings: TOML dictionary. It should contain a ``timeslice_levels`` section. - Otherwise, the timeslices will default to the global (finest) timeslices. - timeslice: Finest timeslices. Defaults to the global in - :py:mod:`~muse.timeslices`. If using the default, then this function - should be called *after* the timeslice module has been setup with a call to - :py:func:`~muse.timeslice.setup_module`. - transforms: Transforms from desired timeslices to the finest timeslice. Defaults - to the global in :py:mod:`~muse.timeslices`. 
If using the default, - then this function should be called *after* the timeslice module has been - setup with a call to :py:func:`~muse.timeslice.setup_module`. - - Returns: - A xr.Dataset with the timeslice coordinates. - - Example: - >>> toml = """ - ... ["timeslices"] - ... winter.weekday.day = 5 - ... winter.weekday.night = 5 - ... winter.weekend.day = 2 - ... winter.weekend.night = 2 - ... winter.weekend.dusk = 1 - ... summer.weekday.day = 5 - ... summer.weekday.night = 5 - ... summer.weekend.day = 2 - ... summer.weekend.night = 2 - ... summer.weekend.dusk = 1 - ... level_names = ["semester", "week", "day"] - ... aggregates.allday = ["day", "night"] - ... [timeslice_levels] - ... day = ["dusk", "allday"] - ... """ - >>> from muse.timeslices import ( - ... reference_timeslice, aggregate_transforms - ... ) - >>> from muse.readers.toml import read_timeslices - >>> ref = reference_timeslice(toml) - >>> transforms = aggregate_transforms(toml, ref) - >>> ts = read_timeslices(toml, ref, transforms) - >>> assert "semester" in ts.coords - >>> assert "week" in ts.coords - >>> assert "day" in ts.coords - >>> assert "represent_hours" in ts.coords - >>> assert set(ts.coords["day"].data) == {"dusk", "allday"} - >>> assert set(ts.coords["week"].data) == {"weekday", "weekend"} - >>> assert set(ts.coords["semester"].data) == {"summer", "winter"} - ''' - from muse.timeslices import TIMESLICE - - timeslice = TIMESLICE - return xr.Dataset({"represent_hours": timeslice}).set_coords("represent_hours") - - def add_known_parameters(dd, u, parent=None): """Function for updating the settings dictionary recursively. @@ -648,18 +597,6 @@ def check_iteration_control(settings: dict) -> None: assert settings["tolerance"] > 0, msg -@register_settings_check(vary_name=False) -def check_time_slices(settings: dict) -> None: - """Check the time slices. 
- - If there is no error, they are transformed into a xr.DataArray - """ - from muse.timeslices import setup_module - - setup_module(settings) - settings["timeslices"] = read_timeslices().timeslice - - @register_settings_check(vary_name=False) def check_global_data_files(settings: dict) -> None: """Checks that the global user files exist.""" diff --git a/src/muse/sectors/legacy_sector.py b/src/muse/sectors/legacy_sector.py index ad61cdc6f..b55b0028b 100644 --- a/src/muse/sectors/legacy_sector.py +++ b/src/muse/sectors/legacy_sector.py @@ -14,10 +14,10 @@ import pandas as pd from xarray import DataArray, Dataset -from muse.readers import read_csv_timeslices, read_initial_market +from muse.readers import read_initial_market from muse.sectors.abstract import AbstractSector from muse.sectors.register import register_sector -from muse.timeslices import QuantityType, new_to_old_timeslice +from muse.timeslices import QuantityType @dataclass diff --git a/src/muse/sectors/preset_sector.py b/src/muse/sectors/preset_sector.py index 64d79922c..c077ac024 100644 --- a/src/muse/sectors/preset_sector.py +++ b/src/muse/sectors/preset_sector.py @@ -28,7 +28,6 @@ def factory(cls, name: str, settings: Any) -> PresetSector: read_presets, read_regression_parameters, read_timeslice_shares, - read_timeslices, ) from muse.regressions import endogenous_demand from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice @@ -36,7 +35,7 @@ def factory(cls, name: str, settings: Any) -> PresetSector: sector_conf = getattr(settings.sectors, name) presets = Dataset() - timeslice = read_timeslices().timeslice + timeslice = TIMESLICE.timeslice if getattr(sector_conf, "consumption_path", None) is not None: consumption = read_presets(sector_conf.consumption_path) presets["consumption"] = consumption.assign_coords(timeslice=timeslice) diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index be4c8bc5b..18aafc3a1 100644 --- a/src/muse/sectors/sector.py +++ 
b/src/muse/sectors/sector.py @@ -26,9 +26,9 @@ def factory(cls, name: str, settings: Any) -> Sector: from muse.interactions import factory as interaction_factory from muse.outputs.sector import factory as ofactory from muse.production import factory as pfactory - from muse.readers import read_timeslices from muse.readers.toml import read_technodata from muse.utilities import nametuple_to_dict + from muse.timeslices import TIMESLICE # Read sector settings sector_settings = getattr(settings.sectors, name)._asdict() @@ -40,7 +40,7 @@ def factory(cls, name: str, settings: Any) -> Sector: raise RuntimeError(f"Empty 'subsectors' section in sector {name}") # Timeslices - timeslices = read_timeslices().get_index("timeslice") + timeslices = TIMESLICE.timeslice # Read technologies technologies = read_technodata(settings, name, settings.time_framework) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 15f28b2ca..fefd3c5d1 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -185,24 +185,6 @@ def convert_timeslice(x, ts, quantity): return extensive * (ts / ts.sum()) -def new_to_old_timeslice(ts: DataArray, ag_level="Month") -> dict: - """Transforms timeslices defined as DataArray to a pandas dataframe. - - This function is used in the LegacySector class to adapt the new MCA timeslices to - the format required by the old sectors. - """ - length = len(ts.month.values) - converted_ts = { - "Month": [kebab_to_camel(w) for w in ts.month.values], - "Day": [kebab_to_camel(w) for w in ts.day.values], - "Hour": [kebab_to_camel(w) for w in ts.hour.values], - "RepresentHours": list(ts.represent_hours.values.astype(float)), - "SN": list(range(1, length + 1)), - "AgLevel": [ag_level] * length, - } - return converted_ts - - def drop_timeslice(data: DataArray) -> DataArray: """Drop the timeslice variable from a DataArray. 
diff --git a/tests/test_timeslices.py b/tests/test_timeslices.py index b4fcd1b6c..c8de264dc 100644 --- a/tests/test_timeslices.py +++ b/tests/test_timeslices.py @@ -1,6 +1,6 @@ """Test timeslice utilities.""" -from pytest import approx, fixture +from pytest import fixture from xarray import DataArray @@ -31,13 +31,6 @@ def reference(toml): return reference_timeslice(toml) -@fixture -def transforms(toml, reference): - from muse.timeslices import aggregate_transforms - - return aggregate_transforms(toml, reference) - - @fixture def timeslice_dataarray(reference): from pandas import MultiIndex @@ -111,97 +104,6 @@ def test_no_overlap(): ) -def test_aggregate_transforms_no_aggregates(): - from itertools import product - - from numpy import ndarray, zeros - - from muse.timeslices import aggregate_transforms, reference_timeslice - - reference = reference_timeslice( - """ - [timeslices] - spring.weekday = 396 - spring.weekend = 396 - autumn.weekday = 396 - autumn.weekend = 156 - """ - ) - - vectors = aggregate_transforms(timeslice=reference) - assert isinstance(vectors, dict) - assert set(vectors) == set(product(["spring", "autumn"], ["weekday", "weekend"])) - for i in range(reference.shape[0]): - index = reference.timeslice[i].values.tolist() - vector = vectors[index] - assert isinstance(vector, ndarray) - expected = zeros(reference.shape, dtype=int) - expected[i] = 1 - assert vector == approx(expected) - - -def test_aggregate_transforms_with_aggregates(): - from itertools import product - - from toml import loads - - from muse.timeslices import aggregate_transforms, reference_timeslice - - toml = loads( - """ - [timeslices] - spring.weekday.day = 396 - spring.weekday.night = 396 - spring.weekend.day = 156 - spring.weekend.night = 156 - summer.weekday.day = 396 - summer.weekday.night = 396 - summer.weekend.day = 156 - summer.weekend.night = 156 - autumn.weekday.day = 396 - autumn.weekday.night = 396 - autumn.weekend.day = 156 - autumn.weekend.night = 156 - 
winter.weekday.day = 396 - winter.weekday.night = 396 - winter.weekend.day = 156 - winter.weekend.night = 156 - - [timeslices.aggregates] - springautumn = ["spring", "autumn"] - allday = ["day", "night"] - week = ["weekday", "weekend"] - """ - ) - reference = reference_timeslice(toml) - - vectors = aggregate_transforms(toml, reference) - assert isinstance(vectors, dict) - assert set(vectors) == set( - product( - ["winter", "spring", "summer", "autumn", "springautumn"], - ["weekend", "weekday", "week"], - ["day", "night", "allday"], - ) - ) - - def to_bitstring(x): - return "".join(x.astype(str)) - - assert to_bitstring(vectors[("spring", "weekday", "night")]) == "0100000000000000" - assert to_bitstring(vectors[("autumn", "weekday", "night")]) == "0000000001000000" - assert to_bitstring(vectors[("spring", "weekend", "night")]) == "0001000000000000" - assert to_bitstring(vectors[("autumn", "weekend", "night")]) == "0000000000010000" - assert ( - to_bitstring(vectors[("springautumn", "weekday", "night")]) - == "0100000001000000" - ) - assert to_bitstring(vectors[("spring", "week", "night")]) == "0101000000000000" - assert ( - to_bitstring(vectors[("springautumn", "week", "night")]) == "0101000001010000" - ) - - def test_drop_timeslice(timeslice_dataarray): from muse.timeslices import drop_timeslice From dc8b8b862ac2248b2dabc39322873463a2d6991c Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 11 Oct 2024 12:00:34 +0100 Subject: [PATCH 28/92] Formatting --- src/muse/sectors/sector.py | 2 +- src/muse/timeslices.py | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index 18aafc3a1..e9a310c6f 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -27,8 +27,8 @@ def factory(cls, name: str, settings: Any) -> Sector: from muse.outputs.sector import factory as ofactory from muse.production import factory as pfactory from muse.readers.toml import read_technodata - from 
muse.utilities import nametuple_to_dict from muse.timeslices import TIMESLICE + from muse.utilities import nametuple_to_dict # Read sector settings sector_settings = getattr(settings.sectors, name)._asdict() diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index fefd3c5d1..a462c661a 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -15,8 +15,6 @@ from pandas import MultiIndex from xarray import DataArray -from muse.readers import kebab_to_camel - TIMESLICE: DataArray = None # type: ignore """Array with the finest timeslice.""" TRANSFORMS: dict[tuple, ndarray] = None # type: ignore From 02884599498c86600f8f6e775ea4943dd55724ae Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 11 Oct 2024 14:03:38 +0100 Subject: [PATCH 29/92] Default arguments for convert_timeslice --- src/muse/constraints.py | 16 ++++---------- src/muse/costs.py | 14 +----------- src/muse/demand_share.py | 12 ++++------- src/muse/examples.py | 4 +--- src/muse/investments.py | 4 ++-- src/muse/objectives.py | 16 +++++++------- src/muse/outputs/mca.py | 36 +++++++++++-------------------- src/muse/quantities.py | 20 ++++++----------- src/muse/readers/csv.py | 14 +++--------- src/muse/sectors/preset_sector.py | 6 ++---- src/muse/sectors/sector.py | 4 ++-- src/muse/timeslices.py | 5 ++++- 12 files changed, 49 insertions(+), 102 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 3849bb2b1..6a0b12bb6 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -446,11 +446,7 @@ def max_production( from xarray import ones_like, zeros_like from muse.commodities import is_enduse - from muse.timeslices import ( - TIMESLICE, - QuantityType, - convert_timeslice, - ) + from muse.timeslices import convert_timeslice if year is None: year = int(market.year.min()) @@ -471,8 +467,6 @@ def max_production( ) capacity = convert_timeslice( techs.fixed_outputs * techs.utilization_factor, - TIMESLICE, - QuantityType.INTENSIVE, ) if "asset" not in 
capacity.dims and "asset" in search_space.dims: capacity = capacity.expand_dims(asset=search_space.asset) @@ -732,7 +726,7 @@ def minimum_service( from xarray import ones_like, zeros_like from muse.commodities import is_enduse - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import convert_timeslice if "minimum_service_factor" not in technologies.data_vars: return None @@ -757,8 +751,6 @@ def minimum_service( ) capacity = convert_timeslice( techs.fixed_outputs * techs.minimum_service_factor, - TIMESLICE, - QuantityType.INTENSIVE, ) if "asset" not in capacity.dims: capacity = capacity.expand_dims(asset=search_space.asset) @@ -821,11 +813,11 @@ def lp_costs(technologies: xr.Dataset, costs: xr.DataArray) -> xr.Dataset: from xarray import zeros_like from muse.commodities import is_enduse - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import convert_timeslice assert "year" not in technologies.dims - ts_costs = convert_timeslice(costs, TIMESLICE, QuantityType.INTENSIVE) + ts_costs = convert_timeslice(costs) selection = dict( commodity=is_enduse(technologies.comm_usage), technology=technologies.technology.isin(costs.replacement), diff --git a/src/muse/costs.py b/src/muse/costs.py index 64fc1b979..bb2e3493c 100644 --- a/src/muse/costs.py +++ b/src/muse/costs.py @@ -13,7 +13,7 @@ from muse.commodities import is_enduse, is_fuel, is_material, is_pollutant from muse.quantities import consumption -from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice +from muse.timeslices import convert_timeslice from muse.utilities import filter_input @@ -98,8 +98,6 @@ def net_present_value( # Cost of installed capacity installed_capacity_costs = convert_timeslice( techs.cap_par * (capacity**techs.cap_exp), - TIMESLICE, - QuantityType.INTENSIVE, ) # Cost related to environmental products @@ -122,8 +120,6 @@ def net_present_value( # Fixed and Variable costs fixed_costs = 
convert_timeslice( techs.fix_par * (capacity**techs.fix_exp), - TIMESLICE, - QuantityType.INTENSIVE, ) variable_costs = techs.var_par * ( (production.sel(commodity=products).sum("commodity")) ** techs.var_exp @@ -262,8 +258,6 @@ def lifetime_levelized_cost_of_energy( # Cost of installed capacity installed_capacity_costs = convert_timeslice( techs.cap_par * (capacity**techs.cap_exp), - TIMESLICE, - QuantityType.INTENSIVE, ) # Cost related to environmental products @@ -286,8 +280,6 @@ def lifetime_levelized_cost_of_energy( # Fixed and Variable costs fixed_costs = convert_timeslice( techs.fix_par * (capacity**techs.fix_exp), - TIMESLICE, - QuantityType.INTENSIVE, ) variable_costs = ( techs.var_par * production.sel(commodity=products) ** techs.var_exp @@ -374,8 +366,6 @@ def annual_levelized_cost_of_energy( annualized_capital_costs = ( convert_timeslice( techs.cap_par * rates, - TIMESLICE, - QuantityType.INTENSIVE, ) / techs.utilization_factor ) @@ -383,8 +373,6 @@ def annual_levelized_cost_of_energy( o_and_e_costs = ( convert_timeslice( (techs.fix_par + techs.var_par), - TIMESLICE, - QuantityType.INTENSIVE, ) / techs.utilization_factor ) diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index fb2321bd9..321899916 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -433,7 +433,7 @@ def unmet_forecasted_demand( ) -> xr.DataArray: """Forecast demand that cannot be serviced by non-decommissioned current assets.""" from muse.commodities import is_enduse - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import convert_timeslice from muse.utilities import reduce_assets year = current_year + forecast @@ -442,7 +442,7 @@ def unmet_forecasted_demand( capacity = reduce_assets([u.assets.capacity.interp(year=year) for u in agents]) ts_capacity = cast( xr.DataArray, - convert_timeslice(capacity, TIMESLICE, QuantityType.INTENSIVE), + convert_timeslice(capacity), ) result = unmet_demand(smarket, 
ts_capacity, technologies, production) @@ -565,7 +565,7 @@ def new_consumption( """ from numpy import minimum - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import convert_timeslice # Interpolate market to forecast year market = market.interp(year=[current_year, current_year + forecast]) @@ -578,8 +578,6 @@ def new_consumption( # Capacity in the forecast year ts_capa = convert_timeslice( capacity.interp(year=current_year + forecast), - TIMESLICE, - QuantityType.INTENSIVE, ) assert isinstance(ts_capa, xr.DataArray) @@ -610,7 +608,7 @@ def new_and_retro_demands( from numpy import minimum from muse.production import factory as prod_factory - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import convert_timeslice production_method = production if callable(production) else prod_factory(production) assert callable(production_method) @@ -621,8 +619,6 @@ def new_and_retro_demands( # Split capacity between timeslices ts_capa = convert_timeslice( capacity.interp(year=[current_year, current_year + forecast]), - TIMESLICE, - QuantityType.INTENSIVE, ) assert isinstance(ts_capa, xr.DataArray) diff --git a/src/muse/examples.py b/src/muse/examples.py index 390a7be96..236ac9f38 100644 --- a/src/muse/examples.py +++ b/src/muse/examples.py @@ -241,7 +241,7 @@ def matching_market(sector: str, model: str = "default") -> xr.Dataset: from muse.examples import sector as load_sector from muse.quantities import consumption, maximum_production from muse.sectors import Sector - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import convert_timeslice from muse.utilities import agent_concatenation loaded_sector = cast(Sector, load_sector(sector, model)) @@ -252,8 +252,6 @@ def matching_market(sector: str, model: str = "default") -> xr.Dataset: xr.DataArray, convert_timeslice( maximum_production(loaded_sector.technologies, assets.capacity), - TIMESLICE, - 
QuantityType.INTENSIVE, ), ) market["supply"] = production.sum("asset") diff --git a/src/muse/investments.py b/src/muse/investments.py index 923100a70..6fed448a2 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -227,7 +227,7 @@ def adhoc_match_demand( ) -> xr.DataArray: from muse.demand_matching import demand_matching from muse.quantities import capacity_in_use, maximum_production - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import convert_timeslice demand = next(c for c in constraints if c.name == "demand").b @@ -240,7 +240,7 @@ def adhoc_match_demand( commodity=demand.commodity, ).drop_vars("technology") if "timeslice" in demand.dims and "timeslice" not in max_prod.dims: - max_prod = convert_timeslice(max_prod, TIMESLICE, QuantityType.INTENSIVE) + max_prod = convert_timeslice(max_prod) # Push disabled techs to last rank. # Any production assigned to them by the demand-matching algorithm will be removed. diff --git a/src/muse/objectives.py b/src/muse/objectives.py index 0a9164135..babe2593a 100644 --- a/src/muse/objectives.py +++ b/src/muse/objectives.py @@ -383,11 +383,11 @@ def lifetime_levelized_cost_of_energy( due to a zero utilisation factor. """ from muse.costs import lifetime_levelized_cost_of_energy as LCOE - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import convert_timeslice capacity = capacity_to_service_demand(technologies, demand) production = capacity * technologies.fixed_outputs * technologies.utilization_factor - production = convert_timeslice(production, TIMESLICE, QuantityType.INTENSIVE) + production = convert_timeslice(production) results = LCOE( technologies=technologies, @@ -413,11 +413,11 @@ def net_present_value( See :py:func:`muse.costs.net_present_value` for more details. 
""" from muse.costs import net_present_value as NPV - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import convert_timeslice capacity = capacity_to_service_demand(technologies, demand) production = capacity * technologies.fixed_outputs * technologies.utilization_factor - production = convert_timeslice(production, TIMESLICE, QuantityType.INTENSIVE) + production = convert_timeslice(production) results = NPV( technologies=technologies, @@ -442,11 +442,11 @@ def net_present_cost( See :py:func:`muse.costs.net_present_cost` for more details. """ from muse.costs import net_present_cost as NPC - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import convert_timeslice capacity = capacity_to_service_demand(technologies, demand) production = capacity * technologies.fixed_outputs * technologies.utilization_factor - production = convert_timeslice(production, TIMESLICE, QuantityType.INTENSIVE) + production = convert_timeslice(production) results = NPC( technologies=technologies, @@ -471,11 +471,11 @@ def equivalent_annual_cost( See :py:func:`muse.costs.equivalent_annual_cost` for more details. 
""" from muse.costs import equivalent_annual_cost as EAC - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import convert_timeslice capacity = capacity_to_service_demand(technologies, demand) production = capacity * technologies.fixed_outputs * technologies.utilization_factor - production = convert_timeslice(production, TIMESLICE, QuantityType.INTENSIVE) + production = convert_timeslice(production) results = EAC( technologies=technologies, diff --git a/src/muse/outputs/mca.py b/src/muse/outputs/mca.py index 8a6d7f7fe..128c00ea7 100644 --- a/src/muse/outputs/mca.py +++ b/src/muse/outputs/mca.py @@ -35,12 +35,7 @@ def quantity( from muse.outputs.sector import market_quantity from muse.registration import registrator from muse.sectors import AbstractSector -from muse.timeslices import ( - TIMESLICE, - QuantityType, - convert_timeslice, - drop_timeslice, -) +from muse.timeslices import convert_timeslice, drop_timeslice from muse.utilities import multiindex_to_coords OUTPUT_QUANTITY_SIGNATURE = Callable[ @@ -339,6 +334,7 @@ def sector_supply(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.Da if len(techs) > 0: for a in agents: output_year = a.year - a.forecast + capacity = a.filter_input(a.assets.capacity, year=output_year).fillna(0.0) technologies = a.filter_input(techs, year=output_year).fillna(0.0) agent_market = market.sel(year=output_year).copy() agent_market["consumption"] = drop_timeslice( @@ -357,11 +353,9 @@ def sector_supply(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.Da result = convert_timeslice( supply( agent_market, - TIMESLICE, + capacity, technologies, ), - agent_market["consumption"].timeslice, - QuantityType.INTENSIVE, ) if "year" in result.dims: @@ -570,6 +564,7 @@ def sector_consumption( if len(techs) > 0: for a in agents: output_year = a.year - a.forecast + capacity = a.filter_input(a.assets.capacity, year=output_year).fillna(0.0) technologies = a.filter_input(techs, 
year=output_year).fillna(0.0) agent_market = market.sel(year=output_year).copy() agent_market["consumption"] = drop_timeslice( @@ -588,11 +583,9 @@ def sector_consumption( production = convert_timeslice( supply( agent_market, - TIMESLICE, + capacity, technologies, ), - agent_market["consumption"].timeslice, - QuantityType.INTENSIVE, ) prices = a.filter_input(market.prices, year=output_year) result = consumption( @@ -722,14 +715,17 @@ def sector_fuel_costs( ) commodity = is_fuel(technologies.comm_usage) + capacity = a.filter_input( + a.assets.capacity, + year=output_year, + ).fillna(0.0) + production = convert_timeslice( supply( agent_market, - TIMESLICE, + capacity, technologies, ), - agent_market["consumption"].timeslice, - QuantityType.INTENSIVE, ) prices = a.filter_input(market.prices, year=output_year) @@ -783,8 +779,6 @@ def sector_capital_costs( result = data.cap_par * (capacity**data.cap_exp) data_agent = convert_timeslice( result, - TIMESLICE, - QuantityType.INTENSIVE, ) data_agent["agent"] = a.name data_agent["category"] = a.category @@ -845,11 +839,9 @@ def sector_emission_costs( production = convert_timeslice( supply( agent_market, - TIMESLICE, + capacity, technologies, ), - agent_market["consumption"].timeslice, - QuantityType.INTENSIVE, ) total = production.sel(commodity=enduses).sum("commodity") data_agent = total * (allemissions * prices).sum("commodity") @@ -918,8 +910,6 @@ def sector_lcoe(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.Data production = capacity * techs.fixed_outputs * techs.utilization_factor production = convert_timeslice( production, - TIMESLICE, - QuantityType.INTENSIVE, ) result = LCOE( @@ -996,8 +986,6 @@ def sector_eac(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.DataF production = capacity * techs.fixed_outputs * techs.utilization_factor production = convert_timeslice( production, - TIMESLICE, - QuantityType.INTENSIVE, ) result = EAC( diff --git a/src/muse/quantities.py b/src/muse/quantities.py 
index cb36a7734..12dd7e8f8 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -150,7 +150,7 @@ def gross_margin( - non-environmental commodities OUTPUTS are related to revenues. """ from muse.commodities import is_enduse, is_pollutant - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import convert_timeslice from muse.utilities import broadcast_techs tech = broadcast_techs( # type: ignore @@ -189,8 +189,6 @@ def gross_margin( # Variable costs depend on factors such as labour variable_costs = convert_timeslice( var_par * ((fixed_outputs.sel(commodity=enduses)).sum("commodity")) ** var_exp, - TIMESLICE, - QuantityType.INTENSIVE, ) # The individual prices are selected @@ -269,7 +267,7 @@ def consumption( are not given, then flexible consumption is *not* considered. """ from muse.commodities import is_enduse, is_fuel - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import convert_timeslice from muse.utilities import filter_with_template params = filter_with_template( @@ -283,9 +281,7 @@ def consumption( production = production.sel(commodity=is_enduse(comm_usage)).sum("commodity") if prices is not None and "timeslice" in prices.dims: - production = convert_timeslice( # type: ignore - production, TIMESLICE, QuantityType.INTENSIVE - ) + production = convert_timeslice(production) # type: ignore params_fuels = is_fuel(params.comm_usage) consumption = production * params.fixed_inputs.where(params_fuels, 0) @@ -380,7 +376,7 @@ def demand_matched_production( """ from muse.costs import annual_levelized_cost_of_energy as ALCOE from muse.demand_matching import demand_matching - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import convert_timeslice from muse.utilities import broadcast_techs technodata = cast(xr.Dataset, broadcast_techs(technologies, capacity)) @@ -388,9 +384,7 @@ def demand_matched_production( max_production 
= maximum_production(technodata, capacity, **filters) assert ("timeslice" in demand.dims) == ("timeslice" in cost.dims) if "timeslice" in demand.dims and "timeslice" not in max_production.dims: - max_production = convert_timeslice( - max_production, TIMESLICE, QuantityType.INTENSIVE - ) + max_production = convert_timeslice(max_production) return demand_matching(demand, cost, max_production) @@ -459,7 +453,7 @@ def costed_production( service is applied first. """ from muse.quantities import maximum_production - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import convert_timeslice from muse.utilities import broadcast_techs technodata = cast(xr.Dataset, broadcast_techs(technologies, capacity)) @@ -477,8 +471,6 @@ def group_assets(x: xr.DataArray) -> xr.DataArray: ranking = costs.rank("asset") maxprod = convert_timeslice( maximum_production(technodata, capacity), - TIMESLICE, - QuantityType.INTENSIVE, ) commodity = (maxprod > 0).any([i for i in maxprod.dims if i != "commodity"]) commodity = commodity.drop_vars( diff --git a/src/muse/readers/csv.py b/src/muse/readers/csv.py index 68f9084fb..89b4f60ec 100644 --- a/src/muse/readers/csv.py +++ b/src/muse/readers/csv.py @@ -604,11 +604,7 @@ def read_initial_market( """Read projections, import and export csv files.""" from logging import getLogger - from muse.timeslices import ( - TIMESLICE, - QuantityType, - convert_timeslice, - ) + from muse.timeslices import TIMESLICE, convert_timeslice # Projections must always be present if isinstance(projections, (str, Path)): @@ -631,12 +627,8 @@ def read_initial_market( getLogger(__name__).info("Base year import not provided. 
Set to zero.") base_year_import = xr.zeros_like(projections) - base_year_export = convert_timeslice( - base_year_export, TIMESLICE, QuantityType.INTENSIVE - ) - base_year_import = convert_timeslice( - base_year_import, TIMESLICE, QuantityType.INTENSIVE - ) + base_year_export = convert_timeslice(base_year_export) + base_year_import = convert_timeslice(base_year_import) base_year_export.name = "exports" base_year_import.name = "imports" diff --git a/src/muse/sectors/preset_sector.py b/src/muse/sectors/preset_sector.py index c077ac024..04c6e48ca 100644 --- a/src/muse/sectors/preset_sector.py +++ b/src/muse/sectors/preset_sector.py @@ -30,7 +30,7 @@ def factory(cls, name: str, settings: Any) -> PresetSector: read_timeslice_shares, ) from muse.regressions import endogenous_demand - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, convert_timeslice sector_conf = getattr(settings.sectors, name) presets = Dataset() @@ -118,9 +118,7 @@ def factory(cls, name: str, settings: Any) -> PresetSector: # add timeslice, if missing for component in {"supply", "consumption"}: if "timeslice" not in presets[component].dims: - presets[component] = convert_timeslice( - presets[component], TIMESLICE, QuantityType.INTENSIVE - ) + presets[component] = convert_timeslice(presets[component]) comm_usage = (presets.costs > 0).any(set(presets.costs.dims) - {"commodity"}) presets["comm_usage"] = ( diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index e9a310c6f..f9a9f70d0 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -283,7 +283,7 @@ def market_variables(self, market: xr.Dataset, technologies: xr.Dataset) -> Any: from muse.commodities import is_pollutant from muse.costs import annual_levelized_cost_of_energy, supply_cost from muse.quantities import consumption - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import convert_timeslice from 
muse.utilities import broadcast_techs years = market.year.values @@ -294,7 +294,7 @@ def market_variables(self, market: xr.Dataset, technologies: xr.Dataset) -> Any: market=market, capacity=capacity, technologies=technologies ) if "timeslice" in market.prices.dims and "timeslice" not in supply.dims: - supply = convert_timeslice(supply, TIMESLICE, QuantityType.INTENSIVE) + supply = convert_timeslice(supply) # Calculate consumption consume = consumption(technologies, supply, market.prices) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index a462c661a..2a5f420dc 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -167,9 +167,12 @@ class QuantityType(Enum): EXTENSIVE = "extensive" -def convert_timeslice(x, ts, quantity): +def convert_timeslice(x, ts=None, quantity=QuantityType.INTENSIVE): from xarray import Coordinates + if ts is None: + ts = TIMESLICE + if hasattr(x, "timeslice"): x = x.sel(timeslice=ts["timeslice"]) return x From c7f67f4fe3e9c80fae79d9dfdd13fdfc9f5b5a38 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 11 Oct 2024 14:16:00 +0100 Subject: [PATCH 30/92] Update results files --- .../1-min-constraint/Results/Power_Supply.csv | 168 +++++++++--------- .../2-max-constraint/Results/Power_Supply.csv | 120 ++++++------- .../default_timeslice/Power_Supply.csv | 123 +++++++++++++ 3 files changed, 267 insertions(+), 144 deletions(-) create mode 100644 tests/example_outputs/default_timeslice/Power_Supply.csv diff --git a/docs/tutorial-code/min-max-timeslice-constraints/1-min-constraint/Results/Power_Supply.csv b/docs/tutorial-code/min-max-timeslice-constraints/1-min-constraint/Results/Power_Supply.csv index ff56d993c..c2a07461a 100644 --- a/docs/tutorial-code/min-max-timeslice-constraints/1-min-constraint/Results/Power_Supply.csv +++ b/docs/tutorial-code/min-max-timeslice-constraints/1-min-constraint/Results/Power_Supply.csv @@ -1,143 +1,143 @@ 
asset,comm_usage,commodity,day,hour,installed,month,region,supply,technology,timeslice,year 0,10,electricity,all-week,night,2020,all-year,R1,0.20000000000,gasCCGT,0,2020 -0,6,CO2f,all-week,night,2020,all-year,R1,18.33400000000,gasCCGT,0,2020 0,10,electricity,all-week,morning,2020,all-year,R1,0.40000000000,gasCCGT,1,2020 -0,6,CO2f,all-week,morning,2020,all-year,R1,36.66800000000,gasCCGT,1,2020 0,10,electricity,all-week,afternoon,2020,all-year,R1,0.60000000000,gasCCGT,2,2020 -0,6,CO2f,all-week,afternoon,2020,all-year,R1,55.00200000000,gasCCGT,2,2020 0,10,electricity,all-week,early-peak,2020,all-year,R1,0.40000000000,gasCCGT,3,2020 -0,6,CO2f,all-week,early-peak,2020,all-year,R1,36.66800000000,gasCCGT,3,2020 0,10,electricity,all-week,late-peak,2020,all-year,R1,0.80000000000,gasCCGT,4,2020 -0,6,CO2f,all-week,late-peak,2020,all-year,R1,73.33600000000,gasCCGT,4,2020 0,10,electricity,all-week,evening,2020,all-year,R1,1.00000000000,gasCCGT,5,2020 +0,6,CO2f,all-week,night,2020,all-year,R1,18.33400000000,gasCCGT,0,2020 +0,6,CO2f,all-week,morning,2020,all-year,R1,36.66800000000,gasCCGT,1,2020 +0,6,CO2f,all-week,afternoon,2020,all-year,R1,55.00200000000,gasCCGT,2,2020 +0,6,CO2f,all-week,early-peak,2020,all-year,R1,36.66800000000,gasCCGT,3,2020 +0,6,CO2f,all-week,late-peak,2020,all-year,R1,73.33600000000,gasCCGT,4,2020 0,6,CO2f,all-week,evening,2020,all-year,R1,91.67000000000,gasCCGT,5,2020 0,10,electricity,all-week,night,2020,all-year,R1,0.20000000000,gasCCGT,0,2025 -0,6,CO2f,all-week,night,2020,all-year,R1,18.33400000000,gasCCGT,0,2025 -1,10,electricity,all-week,night,2020,all-year,R1,0.22220000000,windturbine,0,2025 0,10,electricity,all-week,morning,2020,all-year,R1,0.40000000000,gasCCGT,1,2025 -0,6,CO2f,all-week,morning,2020,all-year,R1,36.66800000000,gasCCGT,1,2025 -1,10,electricity,all-week,morning,2020,all-year,R1,0.23330000000,windturbine,1,2025 0,10,electricity,all-week,afternoon,2020,all-year,R1,0.60000000000,gasCCGT,2,2025 
-0,6,CO2f,all-week,afternoon,2020,all-year,R1,55.00200000000,gasCCGT,2,2025 0,10,electricity,all-week,early-peak,2020,all-year,R1,0.40000000000,gasCCGT,3,2025 -0,6,CO2f,all-week,early-peak,2020,all-year,R1,36.66800000000,gasCCGT,3,2025 -1,10,electricity,all-week,early-peak,2020,all-year,R1,0.23330000000,windturbine,3,2025 0,10,electricity,all-week,late-peak,2020,all-year,R1,0.80000000000,gasCCGT,4,2025 -0,6,CO2f,all-week,late-peak,2020,all-year,R1,73.33600000000,gasCCGT,4,2025 -1,10,electricity,all-week,late-peak,2020,all-year,R1,0.46670000000,windturbine,4,2025 0,10,electricity,all-week,evening,2020,all-year,R1,1.00000000000,gasCCGT,5,2025 +0,6,CO2f,all-week,night,2020,all-year,R1,18.33400000000,gasCCGT,0,2025 +0,6,CO2f,all-week,morning,2020,all-year,R1,36.66800000000,gasCCGT,1,2025 +0,6,CO2f,all-week,afternoon,2020,all-year,R1,55.00200000000,gasCCGT,2,2025 +0,6,CO2f,all-week,early-peak,2020,all-year,R1,36.66800000000,gasCCGT,3,2025 +0,6,CO2f,all-week,late-peak,2020,all-year,R1,73.33600000000,gasCCGT,4,2025 0,6,CO2f,all-week,evening,2020,all-year,R1,91.67000000000,gasCCGT,5,2025 +1,10,electricity,all-week,night,2020,all-year,R1,0.22220000000,windturbine,0,2025 +1,10,electricity,all-week,morning,2020,all-year,R1,0.23330000000,windturbine,1,2025 +1,10,electricity,all-week,early-peak,2020,all-year,R1,0.23330000000,windturbine,3,2025 +1,10,electricity,all-week,late-peak,2020,all-year,R1,0.46670000000,windturbine,4,2025 1,10,electricity,all-week,night,2020,all-year,R1,0.33330000000,windturbine,0,2030 -2,10,electricity,all-week,night,2025,all-year,R1,0.33330000000,windturbine,0,2030 1,10,electricity,all-week,morning,2020,all-year,R1,0.50000000000,windturbine,1,2030 -2,10,electricity,all-week,morning,2025,all-year,R1,0.50000000000,windturbine,1,2030 1,10,electricity,all-week,afternoon,2020,all-year,R1,0.33330000000,windturbine,2,2030 -2,10,electricity,all-week,afternoon,2025,all-year,R1,0.33330000000,windturbine,2,2030 
1,10,electricity,all-week,early-peak,2020,all-year,R1,0.50000000000,windturbine,3,2030 -2,10,electricity,all-week,early-peak,2025,all-year,R1,0.50000000000,windturbine,3,2030 1,10,electricity,all-week,late-peak,2020,all-year,R1,1.00000000000,windturbine,4,2030 -2,10,electricity,all-week,late-peak,2025,all-year,R1,1.00000000000,windturbine,4,2030 1,10,electricity,all-week,evening,2020,all-year,R1,0.66670000000,windturbine,5,2030 +2,10,electricity,all-week,night,2025,all-year,R1,0.33330000000,windturbine,0,2030 +2,10,electricity,all-week,morning,2025,all-year,R1,0.50000000000,windturbine,1,2030 +2,10,electricity,all-week,afternoon,2025,all-year,R1,0.33330000000,windturbine,2,2030 +2,10,electricity,all-week,early-peak,2025,all-year,R1,0.50000000000,windturbine,3,2030 +2,10,electricity,all-week,late-peak,2025,all-year,R1,1.00000000000,windturbine,4,2030 2,10,electricity,all-week,evening,2025,all-year,R1,0.66670000000,windturbine,5,2030 1,10,electricity,all-week,night,2020,all-year,R1,0.20000000000,windturbine,0,2035 -2,10,electricity,all-week,night,2025,all-year,R1,0.20000000000,windturbine,0,2035 -3,10,electricity,all-week,night,2030,all-year,R1,0.40000000000,windturbine,0,2035 1,10,electricity,all-week,morning,2020,all-year,R1,0.30000000000,windturbine,1,2035 -2,10,electricity,all-week,morning,2025,all-year,R1,0.30000000000,windturbine,1,2035 -3,10,electricity,all-week,morning,2030,all-year,R1,0.60000000000,windturbine,1,2035 1,10,electricity,all-week,afternoon,2020,all-year,R1,0.20000000000,windturbine,2,2035 -2,10,electricity,all-week,afternoon,2025,all-year,R1,0.20000000000,windturbine,2,2035 -3,10,electricity,all-week,afternoon,2030,all-year,R1,0.40000000000,windturbine,2,2035 1,10,electricity,all-week,early-peak,2020,all-year,R1,0.30000000000,windturbine,3,2035 -2,10,electricity,all-week,early-peak,2025,all-year,R1,0.30000000000,windturbine,3,2035 -3,10,electricity,all-week,early-peak,2030,all-year,R1,0.60000000000,windturbine,3,2035 
1,10,electricity,all-week,late-peak,2020,all-year,R1,0.60000000000,windturbine,4,2035 -2,10,electricity,all-week,late-peak,2025,all-year,R1,0.60000000000,windturbine,4,2035 -3,10,electricity,all-week,late-peak,2030,all-year,R1,1.20000000000,windturbine,4,2035 1,10,electricity,all-week,evening,2020,all-year,R1,0.40000000000,windturbine,5,2035 +2,10,electricity,all-week,night,2025,all-year,R1,0.20000000000,windturbine,0,2035 +2,10,electricity,all-week,morning,2025,all-year,R1,0.30000000000,windturbine,1,2035 +2,10,electricity,all-week,afternoon,2025,all-year,R1,0.20000000000,windturbine,2,2035 +2,10,electricity,all-week,early-peak,2025,all-year,R1,0.30000000000,windturbine,3,2035 +2,10,electricity,all-week,late-peak,2025,all-year,R1,0.60000000000,windturbine,4,2035 2,10,electricity,all-week,evening,2025,all-year,R1,0.40000000000,windturbine,5,2035 +3,10,electricity,all-week,night,2030,all-year,R1,0.40000000000,windturbine,0,2035 +3,10,electricity,all-week,morning,2030,all-year,R1,0.60000000000,windturbine,1,2035 +3,10,electricity,all-week,afternoon,2030,all-year,R1,0.40000000000,windturbine,2,2035 +3,10,electricity,all-week,early-peak,2030,all-year,R1,0.60000000000,windturbine,3,2035 +3,10,electricity,all-week,late-peak,2030,all-year,R1,1.20000000000,windturbine,4,2035 3,10,electricity,all-week,evening,2030,all-year,R1,0.80000000000,windturbine,5,2035 1,10,electricity,all-week,night,2020,all-year,R1,0.15560000000,windturbine,0,2040 -2,10,electricity,all-week,night,2025,all-year,R1,0.15560000000,windturbine,0,2040 -3,10,electricity,all-week,night,2030,all-year,R1,0.31110000000,windturbine,0,2040 -4,10,electricity,all-week,night,2035,all-year,R1,0.31110000000,windturbine,0,2040 1,10,electricity,all-week,morning,2020,all-year,R1,0.23330000000,windturbine,1,2040 -2,10,electricity,all-week,morning,2025,all-year,R1,0.23330000000,windturbine,1,2040 -3,10,electricity,all-week,morning,2030,all-year,R1,0.46670000000,windturbine,1,2040 
-4,10,electricity,all-week,morning,2035,all-year,R1,0.46670000000,windturbine,1,2040 1,10,electricity,all-week,afternoon,2020,all-year,R1,0.15560000000,windturbine,2,2040 -2,10,electricity,all-week,afternoon,2025,all-year,R1,0.15560000000,windturbine,2,2040 -3,10,electricity,all-week,afternoon,2030,all-year,R1,0.31110000000,windturbine,2,2040 -4,10,electricity,all-week,afternoon,2035,all-year,R1,0.31110000000,windturbine,2,2040 1,10,electricity,all-week,early-peak,2020,all-year,R1,0.23330000000,windturbine,3,2040 -2,10,electricity,all-week,early-peak,2025,all-year,R1,0.23330000000,windturbine,3,2040 -3,10,electricity,all-week,early-peak,2030,all-year,R1,0.46670000000,windturbine,3,2040 -4,10,electricity,all-week,early-peak,2035,all-year,R1,0.46670000000,windturbine,3,2040 1,10,electricity,all-week,late-peak,2020,all-year,R1,0.46670000000,windturbine,4,2040 -2,10,electricity,all-week,late-peak,2025,all-year,R1,0.46670000000,windturbine,4,2040 -3,10,electricity,all-week,late-peak,2030,all-year,R1,0.93330000000,windturbine,4,2040 -4,10,electricity,all-week,late-peak,2035,all-year,R1,0.93330000000,windturbine,4,2040 1,10,electricity,all-week,evening,2020,all-year,R1,0.31110000000,windturbine,5,2040 +2,10,electricity,all-week,night,2025,all-year,R1,0.15560000000,windturbine,0,2040 +2,10,electricity,all-week,morning,2025,all-year,R1,0.23330000000,windturbine,1,2040 +2,10,electricity,all-week,afternoon,2025,all-year,R1,0.15560000000,windturbine,2,2040 +2,10,electricity,all-week,early-peak,2025,all-year,R1,0.23330000000,windturbine,3,2040 +2,10,electricity,all-week,late-peak,2025,all-year,R1,0.46670000000,windturbine,4,2040 2,10,electricity,all-week,evening,2025,all-year,R1,0.31110000000,windturbine,5,2040 +3,10,electricity,all-week,night,2030,all-year,R1,0.31110000000,windturbine,0,2040 +3,10,electricity,all-week,morning,2030,all-year,R1,0.46670000000,windturbine,1,2040 +3,10,electricity,all-week,afternoon,2030,all-year,R1,0.31110000000,windturbine,2,2040 
+3,10,electricity,all-week,early-peak,2030,all-year,R1,0.46670000000,windturbine,3,2040 +3,10,electricity,all-week,late-peak,2030,all-year,R1,0.93330000000,windturbine,4,2040 3,10,electricity,all-week,evening,2030,all-year,R1,0.62220000000,windturbine,5,2040 +4,10,electricity,all-week,night,2035,all-year,R1,0.31110000000,windturbine,0,2040 +4,10,electricity,all-week,morning,2035,all-year,R1,0.46670000000,windturbine,1,2040 +4,10,electricity,all-week,afternoon,2035,all-year,R1,0.31110000000,windturbine,2,2040 +4,10,electricity,all-week,early-peak,2035,all-year,R1,0.46670000000,windturbine,3,2040 +4,10,electricity,all-week,late-peak,2035,all-year,R1,0.93330000000,windturbine,4,2040 4,10,electricity,all-week,evening,2035,all-year,R1,0.62220000000,windturbine,5,2040 1,10,electricity,all-week,night,2020,all-year,R1,0.13330000000,windturbine,0,2045 -2,10,electricity,all-week,night,2025,all-year,R1,0.13330000000,windturbine,0,2045 -3,10,electricity,all-week,night,2030,all-year,R1,0.26670000000,windturbine,0,2045 -4,10,electricity,all-week,night,2035,all-year,R1,0.26670000000,windturbine,0,2045 -5,10,electricity,all-week,night,2040,all-year,R1,0.26670000000,windturbine,0,2045 1,10,electricity,all-week,morning,2020,all-year,R1,0.20000000000,windturbine,1,2045 -2,10,electricity,all-week,morning,2025,all-year,R1,0.20000000000,windturbine,1,2045 -3,10,electricity,all-week,morning,2030,all-year,R1,0.40000000000,windturbine,1,2045 -4,10,electricity,all-week,morning,2035,all-year,R1,0.40000000000,windturbine,1,2045 -5,10,electricity,all-week,morning,2040,all-year,R1,0.40000000000,windturbine,1,2045 1,10,electricity,all-week,afternoon,2020,all-year,R1,0.13330000000,windturbine,2,2045 -2,10,electricity,all-week,afternoon,2025,all-year,R1,0.13330000000,windturbine,2,2045 -3,10,electricity,all-week,afternoon,2030,all-year,R1,0.26670000000,windturbine,2,2045 -4,10,electricity,all-week,afternoon,2035,all-year,R1,0.26670000000,windturbine,2,2045 
-5,10,electricity,all-week,afternoon,2040,all-year,R1,0.26670000000,windturbine,2,2045 1,10,electricity,all-week,early-peak,2020,all-year,R1,0.20000000000,windturbine,3,2045 -2,10,electricity,all-week,early-peak,2025,all-year,R1,0.20000000000,windturbine,3,2045 -3,10,electricity,all-week,early-peak,2030,all-year,R1,0.40000000000,windturbine,3,2045 -4,10,electricity,all-week,early-peak,2035,all-year,R1,0.40000000000,windturbine,3,2045 -5,10,electricity,all-week,early-peak,2040,all-year,R1,0.40000000000,windturbine,3,2045 1,10,electricity,all-week,late-peak,2020,all-year,R1,0.40000000000,windturbine,4,2045 -2,10,electricity,all-week,late-peak,2025,all-year,R1,0.40000000000,windturbine,4,2045 -3,10,electricity,all-week,late-peak,2030,all-year,R1,0.80000000000,windturbine,4,2045 -4,10,electricity,all-week,late-peak,2035,all-year,R1,0.80000000000,windturbine,4,2045 -5,10,electricity,all-week,late-peak,2040,all-year,R1,0.80000000000,windturbine,4,2045 1,10,electricity,all-week,evening,2020,all-year,R1,0.26670000000,windturbine,5,2045 +2,10,electricity,all-week,night,2025,all-year,R1,0.13330000000,windturbine,0,2045 +2,10,electricity,all-week,morning,2025,all-year,R1,0.20000000000,windturbine,1,2045 +2,10,electricity,all-week,afternoon,2025,all-year,R1,0.13330000000,windturbine,2,2045 +2,10,electricity,all-week,early-peak,2025,all-year,R1,0.20000000000,windturbine,3,2045 +2,10,electricity,all-week,late-peak,2025,all-year,R1,0.40000000000,windturbine,4,2045 2,10,electricity,all-week,evening,2025,all-year,R1,0.26670000000,windturbine,5,2045 +3,10,electricity,all-week,night,2030,all-year,R1,0.26670000000,windturbine,0,2045 +3,10,electricity,all-week,morning,2030,all-year,R1,0.40000000000,windturbine,1,2045 +3,10,electricity,all-week,afternoon,2030,all-year,R1,0.26670000000,windturbine,2,2045 +3,10,electricity,all-week,early-peak,2030,all-year,R1,0.40000000000,windturbine,3,2045 +3,10,electricity,all-week,late-peak,2030,all-year,R1,0.80000000000,windturbine,4,2045 
3,10,electricity,all-week,evening,2030,all-year,R1,0.53330000000,windturbine,5,2045 +4,10,electricity,all-week,night,2035,all-year,R1,0.26670000000,windturbine,0,2045 +4,10,electricity,all-week,morning,2035,all-year,R1,0.40000000000,windturbine,1,2045 +4,10,electricity,all-week,afternoon,2035,all-year,R1,0.26670000000,windturbine,2,2045 +4,10,electricity,all-week,early-peak,2035,all-year,R1,0.40000000000,windturbine,3,2045 +4,10,electricity,all-week,late-peak,2035,all-year,R1,0.80000000000,windturbine,4,2045 4,10,electricity,all-week,evening,2035,all-year,R1,0.53330000000,windturbine,5,2045 +5,10,electricity,all-week,night,2040,all-year,R1,0.26670000000,windturbine,0,2045 +5,10,electricity,all-week,morning,2040,all-year,R1,0.40000000000,windturbine,1,2045 +5,10,electricity,all-week,afternoon,2040,all-year,R1,0.26670000000,windturbine,2,2045 +5,10,electricity,all-week,early-peak,2040,all-year,R1,0.40000000000,windturbine,3,2045 +5,10,electricity,all-week,late-peak,2040,all-year,R1,0.80000000000,windturbine,4,2045 5,10,electricity,all-week,evening,2040,all-year,R1,0.53330000000,windturbine,5,2045 2,10,electricity,all-week,night,2025,all-year,R1,0.13330000000,windturbine,0,2050 -3,10,electricity,all-week,night,2030,all-year,R1,0.26670000000,windturbine,0,2050 -4,10,electricity,all-week,night,2035,all-year,R1,0.26670000000,windturbine,0,2050 -5,10,electricity,all-week,night,2040,all-year,R1,0.26670000000,windturbine,0,2050 -6,10,electricity,all-week,night,2045,all-year,R1,0.26670000000,windturbine,0,2050 2,10,electricity,all-week,morning,2025,all-year,R1,0.20000000000,windturbine,1,2050 -3,10,electricity,all-week,morning,2030,all-year,R1,0.40000000000,windturbine,1,2050 -4,10,electricity,all-week,morning,2035,all-year,R1,0.40000000000,windturbine,1,2050 -5,10,electricity,all-week,morning,2040,all-year,R1,0.40000000000,windturbine,1,2050 -6,10,electricity,all-week,morning,2045,all-year,R1,0.40000000000,windturbine,1,2050 
2,10,electricity,all-week,afternoon,2025,all-year,R1,0.13330000000,windturbine,2,2050 -3,10,electricity,all-week,afternoon,2030,all-year,R1,0.26670000000,windturbine,2,2050 -4,10,electricity,all-week,afternoon,2035,all-year,R1,0.26670000000,windturbine,2,2050 -5,10,electricity,all-week,afternoon,2040,all-year,R1,0.26670000000,windturbine,2,2050 -6,10,electricity,all-week,afternoon,2045,all-year,R1,0.26670000000,windturbine,2,2050 2,10,electricity,all-week,early-peak,2025,all-year,R1,0.20000000000,windturbine,3,2050 -3,10,electricity,all-week,early-peak,2030,all-year,R1,0.40000000000,windturbine,3,2050 -4,10,electricity,all-week,early-peak,2035,all-year,R1,0.40000000000,windturbine,3,2050 -5,10,electricity,all-week,early-peak,2040,all-year,R1,0.40000000000,windturbine,3,2050 -6,10,electricity,all-week,early-peak,2045,all-year,R1,0.40000000000,windturbine,3,2050 2,10,electricity,all-week,late-peak,2025,all-year,R1,0.40000000000,windturbine,4,2050 -3,10,electricity,all-week,late-peak,2030,all-year,R1,0.80000000000,windturbine,4,2050 -4,10,electricity,all-week,late-peak,2035,all-year,R1,0.80000000000,windturbine,4,2050 -5,10,electricity,all-week,late-peak,2040,all-year,R1,0.80000000000,windturbine,4,2050 -6,10,electricity,all-week,late-peak,2045,all-year,R1,0.80000000000,windturbine,4,2050 2,10,electricity,all-week,evening,2025,all-year,R1,0.26670000000,windturbine,5,2050 +3,10,electricity,all-week,night,2030,all-year,R1,0.26670000000,windturbine,0,2050 +3,10,electricity,all-week,morning,2030,all-year,R1,0.40000000000,windturbine,1,2050 +3,10,electricity,all-week,afternoon,2030,all-year,R1,0.26670000000,windturbine,2,2050 +3,10,electricity,all-week,early-peak,2030,all-year,R1,0.40000000000,windturbine,3,2050 +3,10,electricity,all-week,late-peak,2030,all-year,R1,0.80000000000,windturbine,4,2050 3,10,electricity,all-week,evening,2030,all-year,R1,0.53330000000,windturbine,5,2050 +4,10,electricity,all-week,night,2035,all-year,R1,0.26670000000,windturbine,0,2050 
+4,10,electricity,all-week,morning,2035,all-year,R1,0.40000000000,windturbine,1,2050 +4,10,electricity,all-week,afternoon,2035,all-year,R1,0.26670000000,windturbine,2,2050 +4,10,electricity,all-week,early-peak,2035,all-year,R1,0.40000000000,windturbine,3,2050 +4,10,electricity,all-week,late-peak,2035,all-year,R1,0.80000000000,windturbine,4,2050 4,10,electricity,all-week,evening,2035,all-year,R1,0.53330000000,windturbine,5,2050 +5,10,electricity,all-week,night,2040,all-year,R1,0.26670000000,windturbine,0,2050 +5,10,electricity,all-week,morning,2040,all-year,R1,0.40000000000,windturbine,1,2050 +5,10,electricity,all-week,afternoon,2040,all-year,R1,0.26670000000,windturbine,2,2050 +5,10,electricity,all-week,early-peak,2040,all-year,R1,0.40000000000,windturbine,3,2050 +5,10,electricity,all-week,late-peak,2040,all-year,R1,0.80000000000,windturbine,4,2050 5,10,electricity,all-week,evening,2040,all-year,R1,0.53330000000,windturbine,5,2050 +6,10,electricity,all-week,night,2045,all-year,R1,0.26670000000,windturbine,0,2050 +6,10,electricity,all-week,morning,2045,all-year,R1,0.40000000000,windturbine,1,2050 +6,10,electricity,all-week,afternoon,2045,all-year,R1,0.26670000000,windturbine,2,2050 +6,10,electricity,all-week,early-peak,2045,all-year,R1,0.40000000000,windturbine,3,2050 +6,10,electricity,all-week,late-peak,2045,all-year,R1,0.80000000000,windturbine,4,2050 6,10,electricity,all-week,evening,2045,all-year,R1,0.53330000000,windturbine,5,2050 diff --git a/docs/tutorial-code/min-max-timeslice-constraints/2-max-constraint/Results/Power_Supply.csv b/docs/tutorial-code/min-max-timeslice-constraints/2-max-constraint/Results/Power_Supply.csv index 173adbe8e..421a270fb 100644 --- a/docs/tutorial-code/min-max-timeslice-constraints/2-max-constraint/Results/Power_Supply.csv +++ b/docs/tutorial-code/min-max-timeslice-constraints/2-max-constraint/Results/Power_Supply.csv @@ -1,27 +1,27 @@ asset,comm_usage,commodity,day,hour,installed,month,region,supply,technology,timeslice,year 
0,10,electricity,all-week,night,2020,all-year,R1,0.20000000000,gasCCGT,0,2020 -0,6,CO2f,all-week,night,2020,all-year,R1,18.33400000000,gasCCGT,0,2020 0,10,electricity,all-week,morning,2020,all-year,R1,0.40000000000,gasCCGT,1,2020 -0,6,CO2f,all-week,morning,2020,all-year,R1,36.66800000000,gasCCGT,1,2020 0,10,electricity,all-week,afternoon,2020,all-year,R1,0.60000000000,gasCCGT,2,2020 -0,6,CO2f,all-week,afternoon,2020,all-year,R1,55.00200000000,gasCCGT,2,2020 0,10,electricity,all-week,early-peak,2020,all-year,R1,0.40000000000,gasCCGT,3,2020 -0,6,CO2f,all-week,early-peak,2020,all-year,R1,36.66800000000,gasCCGT,3,2020 0,10,electricity,all-week,late-peak,2020,all-year,R1,0.80000000000,gasCCGT,4,2020 -0,6,CO2f,all-week,late-peak,2020,all-year,R1,73.33600000000,gasCCGT,4,2020 0,10,electricity,all-week,evening,2020,all-year,R1,1.00000000000,gasCCGT,5,2020 +0,6,CO2f,all-week,night,2020,all-year,R1,18.33400000000,gasCCGT,0,2020 +0,6,CO2f,all-week,morning,2020,all-year,R1,36.66800000000,gasCCGT,1,2020 +0,6,CO2f,all-week,afternoon,2020,all-year,R1,55.00200000000,gasCCGT,2,2020 +0,6,CO2f,all-week,early-peak,2020,all-year,R1,36.66800000000,gasCCGT,3,2020 +0,6,CO2f,all-week,late-peak,2020,all-year,R1,73.33600000000,gasCCGT,4,2020 0,6,CO2f,all-week,evening,2020,all-year,R1,91.67000000000,gasCCGT,5,2020 0,10,electricity,all-week,night,2020,all-year,R1,0.42220000000,gasCCGT,0,2025 -0,6,CO2f,all-week,night,2020,all-year,R1,38.70510000000,gasCCGT,0,2025 0,10,electricity,all-week,morning,2020,all-year,R1,0.63330000000,gasCCGT,1,2025 -0,6,CO2f,all-week,morning,2020,all-year,R1,58.05770000000,gasCCGT,1,2025 0,10,electricity,all-week,afternoon,2020,all-year,R1,0.60000000000,gasCCGT,2,2025 -0,6,CO2f,all-week,afternoon,2020,all-year,R1,55.00200000000,gasCCGT,2,2025 0,10,electricity,all-week,early-peak,2020,all-year,R1,0.63330000000,gasCCGT,3,2025 -0,6,CO2f,all-week,early-peak,2020,all-year,R1,58.05770000000,gasCCGT,3,2025 
0,10,electricity,all-week,late-peak,2020,all-year,R1,1.26670000000,gasCCGT,4,2025 -0,6,CO2f,all-week,late-peak,2020,all-year,R1,116.11530000000,gasCCGT,4,2025 0,10,electricity,all-week,evening,2020,all-year,R1,1.00000000000,gasCCGT,5,2025 +0,6,CO2f,all-week,night,2020,all-year,R1,38.70510000000,gasCCGT,0,2025 +0,6,CO2f,all-week,morning,2020,all-year,R1,58.05770000000,gasCCGT,1,2025 +0,6,CO2f,all-week,afternoon,2020,all-year,R1,55.00200000000,gasCCGT,2,2025 +0,6,CO2f,all-week,early-peak,2020,all-year,R1,58.05770000000,gasCCGT,3,2025 +0,6,CO2f,all-week,late-peak,2020,all-year,R1,116.11530000000,gasCCGT,4,2025 0,6,CO2f,all-week,evening,2020,all-year,R1,91.67000000000,gasCCGT,5,2025 4,10,electricity,all-week,night,2025,all-year,R1,0.66670000000,windturbine,0,2030 4,10,electricity,all-week,morning,2025,all-year,R1,0.50000000000,windturbine,1,2030 @@ -30,86 +30,86 @@ asset,comm_usage,commodity,day,hour,installed,month,region,supply,technology,tim 4,10,electricity,all-week,late-peak,2025,all-year,R1,1.00000000000,windturbine,4,2030 4,10,electricity,all-week,evening,2025,all-year,R1,1.00000000000,windturbine,5,2030 5,10,electricity,all-week,night,2025,all-year,R1,0.40000000000,windturbine,0,2035 -6,10,electricity,all-week,night,2030,all-year,R1,0.40000000000,windturbine,0,2035 5,10,electricity,all-week,morning,2025,all-year,R1,0.50000000000,windturbine,1,2035 -6,10,electricity,all-week,morning,2030,all-year,R1,0.50000000000,windturbine,1,2035 5,10,electricity,all-week,afternoon,2025,all-year,R1,0.40000000000,windturbine,2,2035 -6,10,electricity,all-week,afternoon,2030,all-year,R1,0.40000000000,windturbine,2,2035 5,10,electricity,all-week,early-peak,2025,all-year,R1,0.60000000000,windturbine,3,2035 -6,10,electricity,all-week,early-peak,2030,all-year,R1,0.60000000000,windturbine,3,2035 5,10,electricity,all-week,late-peak,2025,all-year,R1,1.00000000000,windturbine,4,2035 -6,10,electricity,all-week,late-peak,2030,all-year,R1,1.00000000000,windturbine,4,2035 
5,10,electricity,all-week,evening,2025,all-year,R1,0.80000000000,windturbine,5,2035 +6,10,electricity,all-week,night,2030,all-year,R1,0.40000000000,windturbine,0,2035 +6,10,electricity,all-week,morning,2030,all-year,R1,0.50000000000,windturbine,1,2035 +6,10,electricity,all-week,afternoon,2030,all-year,R1,0.40000000000,windturbine,2,2035 +6,10,electricity,all-week,early-peak,2030,all-year,R1,0.60000000000,windturbine,3,2035 +6,10,electricity,all-week,late-peak,2030,all-year,R1,1.00000000000,windturbine,4,2035 6,10,electricity,all-week,evening,2030,all-year,R1,0.80000000000,windturbine,5,2035 6,10,electricity,all-week,night,2025,all-year,R1,0.31110000000,windturbine,0,2040 -7,10,electricity,all-week,night,2030,all-year,R1,0.31110000000,windturbine,0,2040 -8,10,electricity,all-week,night,2035,all-year,R1,0.31110000000,windturbine,0,2040 6,10,electricity,all-week,morning,2025,all-year,R1,0.46670000000,windturbine,1,2040 -7,10,electricity,all-week,morning,2030,all-year,R1,0.46670000000,windturbine,1,2040 -8,10,electricity,all-week,morning,2035,all-year,R1,0.46670000000,windturbine,1,2040 6,10,electricity,all-week,afternoon,2025,all-year,R1,0.31110000000,windturbine,2,2040 -7,10,electricity,all-week,afternoon,2030,all-year,R1,0.31110000000,windturbine,2,2040 -8,10,electricity,all-week,afternoon,2035,all-year,R1,0.31110000000,windturbine,2,2040 6,10,electricity,all-week,early-peak,2025,all-year,R1,0.46670000000,windturbine,3,2040 -7,10,electricity,all-week,early-peak,2030,all-year,R1,0.46670000000,windturbine,3,2040 -8,10,electricity,all-week,early-peak,2035,all-year,R1,0.46670000000,windturbine,3,2040 6,10,electricity,all-week,late-peak,2025,all-year,R1,0.93330000000,windturbine,4,2040 -7,10,electricity,all-week,late-peak,2030,all-year,R1,0.93330000000,windturbine,4,2040 -8,10,electricity,all-week,late-peak,2035,all-year,R1,0.93330000000,windturbine,4,2040 6,10,electricity,all-week,evening,2025,all-year,R1,0.62220000000,windturbine,5,2040 
+7,10,electricity,all-week,night,2030,all-year,R1,0.31110000000,windturbine,0,2040 +7,10,electricity,all-week,morning,2030,all-year,R1,0.46670000000,windturbine,1,2040 +7,10,electricity,all-week,afternoon,2030,all-year,R1,0.31110000000,windturbine,2,2040 +7,10,electricity,all-week,early-peak,2030,all-year,R1,0.46670000000,windturbine,3,2040 +7,10,electricity,all-week,late-peak,2030,all-year,R1,0.93330000000,windturbine,4,2040 7,10,electricity,all-week,evening,2030,all-year,R1,0.62220000000,windturbine,5,2040 +8,10,electricity,all-week,night,2035,all-year,R1,0.31110000000,windturbine,0,2040 +8,10,electricity,all-week,morning,2035,all-year,R1,0.46670000000,windturbine,1,2040 +8,10,electricity,all-week,afternoon,2035,all-year,R1,0.31110000000,windturbine,2,2040 +8,10,electricity,all-week,early-peak,2035,all-year,R1,0.46670000000,windturbine,3,2040 +8,10,electricity,all-week,late-peak,2035,all-year,R1,0.93330000000,windturbine,4,2040 8,10,electricity,all-week,evening,2035,all-year,R1,0.62220000000,windturbine,5,2040 7,10,electricity,all-week,night,2025,all-year,R1,0.21330000000,windturbine,0,2045 -8,10,electricity,all-week,night,2030,all-year,R1,0.21330000000,windturbine,0,2045 -9,10,electricity,all-week,night,2035,all-year,R1,0.21330000000,windturbine,0,2045 -10,10,electricity,all-week,night,2040,all-year,R1,0.42670000000,windturbine,0,2045 7,10,electricity,all-week,morning,2025,all-year,R1,0.32000000000,windturbine,1,2045 -8,10,electricity,all-week,morning,2030,all-year,R1,0.32000000000,windturbine,1,2045 -9,10,electricity,all-week,morning,2035,all-year,R1,0.32000000000,windturbine,1,2045 -10,10,electricity,all-week,morning,2040,all-year,R1,0.64000000000,windturbine,1,2045 7,10,electricity,all-week,afternoon,2025,all-year,R1,0.21330000000,windturbine,2,2045 -8,10,electricity,all-week,afternoon,2030,all-year,R1,0.21330000000,windturbine,2,2045 -9,10,electricity,all-week,afternoon,2035,all-year,R1,0.21330000000,windturbine,2,2045 
-10,10,electricity,all-week,afternoon,2040,all-year,R1,0.42670000000,windturbine,2,2045 7,10,electricity,all-week,early-peak,2025,all-year,R1,0.32000000000,windturbine,3,2045 -8,10,electricity,all-week,early-peak,2030,all-year,R1,0.32000000000,windturbine,3,2045 -9,10,electricity,all-week,early-peak,2035,all-year,R1,0.32000000000,windturbine,3,2045 -10,10,electricity,all-week,early-peak,2040,all-year,R1,0.64000000000,windturbine,3,2045 7,10,electricity,all-week,late-peak,2025,all-year,R1,0.64000000000,windturbine,4,2045 -8,10,electricity,all-week,late-peak,2030,all-year,R1,0.64000000000,windturbine,4,2045 -9,10,electricity,all-week,late-peak,2035,all-year,R1,0.64000000000,windturbine,4,2045 -10,10,electricity,all-week,late-peak,2040,all-year,R1,1.28000000000,windturbine,4,2045 7,10,electricity,all-week,evening,2025,all-year,R1,0.42670000000,windturbine,5,2045 +8,10,electricity,all-week,night,2030,all-year,R1,0.21330000000,windturbine,0,2045 +8,10,electricity,all-week,morning,2030,all-year,R1,0.32000000000,windturbine,1,2045 +8,10,electricity,all-week,afternoon,2030,all-year,R1,0.21330000000,windturbine,2,2045 +8,10,electricity,all-week,early-peak,2030,all-year,R1,0.32000000000,windturbine,3,2045 +8,10,electricity,all-week,late-peak,2030,all-year,R1,0.64000000000,windturbine,4,2045 8,10,electricity,all-week,evening,2030,all-year,R1,0.42670000000,windturbine,5,2045 +9,10,electricity,all-week,night,2035,all-year,R1,0.21330000000,windturbine,0,2045 +9,10,electricity,all-week,morning,2035,all-year,R1,0.32000000000,windturbine,1,2045 +9,10,electricity,all-week,afternoon,2035,all-year,R1,0.21330000000,windturbine,2,2045 +9,10,electricity,all-week,early-peak,2035,all-year,R1,0.32000000000,windturbine,3,2045 +9,10,electricity,all-week,late-peak,2035,all-year,R1,0.64000000000,windturbine,4,2045 9,10,electricity,all-week,evening,2035,all-year,R1,0.42670000000,windturbine,5,2045 +10,10,electricity,all-week,night,2040,all-year,R1,0.42670000000,windturbine,0,2045 
+10,10,electricity,all-week,morning,2040,all-year,R1,0.64000000000,windturbine,1,2045 +10,10,electricity,all-week,afternoon,2040,all-year,R1,0.42670000000,windturbine,2,2045 +10,10,electricity,all-week,early-peak,2040,all-year,R1,0.64000000000,windturbine,3,2045 +10,10,electricity,all-week,late-peak,2040,all-year,R1,1.28000000000,windturbine,4,2045 10,10,electricity,all-week,evening,2040,all-year,R1,0.85330000000,windturbine,5,2045 8,10,electricity,all-week,night,2025,all-year,R1,0.17140000000,windturbine,0,2050 -9,10,electricity,all-week,night,2030,all-year,R1,0.17140000000,windturbine,0,2050 -10,10,electricity,all-week,night,2035,all-year,R1,0.17140000000,windturbine,0,2050 -11,10,electricity,all-week,night,2040,all-year,R1,0.34290000000,windturbine,0,2050 -12,10,electricity,all-week,night,2045,all-year,R1,0.34290000000,windturbine,0,2050 8,10,electricity,all-week,morning,2025,all-year,R1,0.25710000000,windturbine,1,2050 -9,10,electricity,all-week,morning,2030,all-year,R1,0.25710000000,windturbine,1,2050 -10,10,electricity,all-week,morning,2035,all-year,R1,0.25710000000,windturbine,1,2050 -11,10,electricity,all-week,morning,2040,all-year,R1,0.51430000000,windturbine,1,2050 -12,10,electricity,all-week,morning,2045,all-year,R1,0.51430000000,windturbine,1,2050 8,10,electricity,all-week,afternoon,2025,all-year,R1,0.17140000000,windturbine,2,2050 -9,10,electricity,all-week,afternoon,2030,all-year,R1,0.17140000000,windturbine,2,2050 -10,10,electricity,all-week,afternoon,2035,all-year,R1,0.17140000000,windturbine,2,2050 -11,10,electricity,all-week,afternoon,2040,all-year,R1,0.34290000000,windturbine,2,2050 -12,10,electricity,all-week,afternoon,2045,all-year,R1,0.34290000000,windturbine,2,2050 8,10,electricity,all-week,early-peak,2025,all-year,R1,0.25710000000,windturbine,3,2050 -9,10,electricity,all-week,early-peak,2030,all-year,R1,0.25710000000,windturbine,3,2050 -10,10,electricity,all-week,early-peak,2035,all-year,R1,0.25710000000,windturbine,3,2050 
-11,10,electricity,all-week,early-peak,2040,all-year,R1,0.51430000000,windturbine,3,2050 -12,10,electricity,all-week,early-peak,2045,all-year,R1,0.51430000000,windturbine,3,2050 8,10,electricity,all-week,late-peak,2025,all-year,R1,0.51430000000,windturbine,4,2050 -9,10,electricity,all-week,late-peak,2030,all-year,R1,0.51430000000,windturbine,4,2050 -10,10,electricity,all-week,late-peak,2035,all-year,R1,0.51430000000,windturbine,4,2050 -11,10,electricity,all-week,late-peak,2040,all-year,R1,1.02860000000,windturbine,4,2050 -12,10,electricity,all-week,late-peak,2045,all-year,R1,1.02860000000,windturbine,4,2050 8,10,electricity,all-week,evening,2025,all-year,R1,0.34290000000,windturbine,5,2050 +9,10,electricity,all-week,night,2030,all-year,R1,0.17140000000,windturbine,0,2050 +9,10,electricity,all-week,morning,2030,all-year,R1,0.25710000000,windturbine,1,2050 +9,10,electricity,all-week,afternoon,2030,all-year,R1,0.17140000000,windturbine,2,2050 +9,10,electricity,all-week,early-peak,2030,all-year,R1,0.25710000000,windturbine,3,2050 +9,10,electricity,all-week,late-peak,2030,all-year,R1,0.51430000000,windturbine,4,2050 9,10,electricity,all-week,evening,2030,all-year,R1,0.34290000000,windturbine,5,2050 +10,10,electricity,all-week,night,2035,all-year,R1,0.17140000000,windturbine,0,2050 +10,10,electricity,all-week,morning,2035,all-year,R1,0.25710000000,windturbine,1,2050 +10,10,electricity,all-week,afternoon,2035,all-year,R1,0.17140000000,windturbine,2,2050 +10,10,electricity,all-week,early-peak,2035,all-year,R1,0.25710000000,windturbine,3,2050 +10,10,electricity,all-week,late-peak,2035,all-year,R1,0.51430000000,windturbine,4,2050 10,10,electricity,all-week,evening,2035,all-year,R1,0.34290000000,windturbine,5,2050 +11,10,electricity,all-week,night,2040,all-year,R1,0.34290000000,windturbine,0,2050 +11,10,electricity,all-week,morning,2040,all-year,R1,0.51430000000,windturbine,1,2050 +11,10,electricity,all-week,afternoon,2040,all-year,R1,0.34290000000,windturbine,2,2050 
+11,10,electricity,all-week,early-peak,2040,all-year,R1,0.51430000000,windturbine,3,2050 +11,10,electricity,all-week,late-peak,2040,all-year,R1,1.02860000000,windturbine,4,2050 11,10,electricity,all-week,evening,2040,all-year,R1,0.68570000000,windturbine,5,2050 +12,10,electricity,all-week,night,2045,all-year,R1,0.34290000000,windturbine,0,2050 +12,10,electricity,all-week,morning,2045,all-year,R1,0.51430000000,windturbine,1,2050 +12,10,electricity,all-week,afternoon,2045,all-year,R1,0.34290000000,windturbine,2,2050 +12,10,electricity,all-week,early-peak,2045,all-year,R1,0.51430000000,windturbine,3,2050 +12,10,electricity,all-week,late-peak,2045,all-year,R1,1.02860000000,windturbine,4,2050 12,10,electricity,all-week,evening,2045,all-year,R1,0.68570000000,windturbine,5,2050 diff --git a/tests/example_outputs/default_timeslice/Power_Supply.csv b/tests/example_outputs/default_timeslice/Power_Supply.csv new file mode 100644 index 000000000..6717cad0e --- /dev/null +++ b/tests/example_outputs/default_timeslice/Power_Supply.csv @@ -0,0 +1,123 @@ +asset,comm_usage,commodity,day,hour,installed,month,region,supply,technology,timeslice,year +0,10,electricity,all-week,late-peak,2020,all-year,R1,0.26670000000,gasCCGT,4,2025 +0,6,CO2f,all-week,late-peak,2020,all-year,R1,24.44530000000,gasCCGT,4,2025 +1,10,electricity,all-week,night,2020,all-year,R1,0.42220000000,windturbine,0,2025 +1,10,electricity,all-week,morning,2020,all-year,R1,0.63330000000,windturbine,1,2025 +1,10,electricity,all-week,afternoon,2020,all-year,R1,0.42220000000,windturbine,2,2025 +1,10,electricity,all-week,early-peak,2020,all-year,R1,0.63330000000,windturbine,3,2025 +1,10,electricity,all-week,late-peak,2020,all-year,R1,1.00000000000,windturbine,4,2025 +1,10,electricity,all-week,evening,2020,all-year,R1,0.84440000000,windturbine,5,2025 +1,10,electricity,all-week,night,2020,all-year,R1,0.33330000000,windturbine,0,2030 +1,10,electricity,all-week,morning,2020,all-year,R1,0.50000000000,windturbine,1,2030 
+1,10,electricity,all-week,afternoon,2020,all-year,R1,0.33330000000,windturbine,2,2030 +1,10,electricity,all-week,early-peak,2020,all-year,R1,0.50000000000,windturbine,3,2030 +1,10,electricity,all-week,late-peak,2020,all-year,R1,1.00000000000,windturbine,4,2030 +1,10,electricity,all-week,evening,2020,all-year,R1,0.66670000000,windturbine,5,2030 +2,10,electricity,all-week,night,2025,all-year,R1,0.33330000000,windturbine,0,2030 +2,10,electricity,all-week,morning,2025,all-year,R1,0.50000000000,windturbine,1,2030 +2,10,electricity,all-week,afternoon,2025,all-year,R1,0.33330000000,windturbine,2,2030 +2,10,electricity,all-week,early-peak,2025,all-year,R1,0.50000000000,windturbine,3,2030 +2,10,electricity,all-week,late-peak,2025,all-year,R1,1.00000000000,windturbine,4,2030 +2,10,electricity,all-week,evening,2025,all-year,R1,0.66670000000,windturbine,5,2030 +1,10,electricity,all-week,night,2020,all-year,R1,0.20000000000,windturbine,0,2035 +1,10,electricity,all-week,morning,2020,all-year,R1,0.30000000000,windturbine,1,2035 +1,10,electricity,all-week,afternoon,2020,all-year,R1,0.20000000000,windturbine,2,2035 +1,10,electricity,all-week,early-peak,2020,all-year,R1,0.30000000000,windturbine,3,2035 +1,10,electricity,all-week,late-peak,2020,all-year,R1,0.60000000000,windturbine,4,2035 +1,10,electricity,all-week,evening,2020,all-year,R1,0.40000000000,windturbine,5,2035 +2,10,electricity,all-week,night,2025,all-year,R1,0.20000000000,windturbine,0,2035 +2,10,electricity,all-week,morning,2025,all-year,R1,0.30000000000,windturbine,1,2035 +2,10,electricity,all-week,afternoon,2025,all-year,R1,0.20000000000,windturbine,2,2035 +2,10,electricity,all-week,early-peak,2025,all-year,R1,0.30000000000,windturbine,3,2035 +2,10,electricity,all-week,late-peak,2025,all-year,R1,0.60000000000,windturbine,4,2035 +2,10,electricity,all-week,evening,2025,all-year,R1,0.40000000000,windturbine,5,2035 +3,10,electricity,all-week,night,2030,all-year,R1,0.40000000000,windturbine,0,2035 
+3,10,electricity,all-week,morning,2030,all-year,R1,0.60000000000,windturbine,1,2035 +3,10,electricity,all-week,afternoon,2030,all-year,R1,0.40000000000,windturbine,2,2035 +3,10,electricity,all-week,early-peak,2030,all-year,R1,0.60000000000,windturbine,3,2035 +3,10,electricity,all-week,late-peak,2030,all-year,R1,1.20000000000,windturbine,4,2035 +3,10,electricity,all-week,evening,2030,all-year,R1,0.80000000000,windturbine,5,2035 +1,10,electricity,all-week,night,2020,all-year,R1,0.15560000000,windturbine,0,2040 +1,10,electricity,all-week,morning,2020,all-year,R1,0.23330000000,windturbine,1,2040 +1,10,electricity,all-week,afternoon,2020,all-year,R1,0.15560000000,windturbine,2,2040 +1,10,electricity,all-week,early-peak,2020,all-year,R1,0.23330000000,windturbine,3,2040 +1,10,electricity,all-week,late-peak,2020,all-year,R1,0.46670000000,windturbine,4,2040 +1,10,electricity,all-week,evening,2020,all-year,R1,0.31110000000,windturbine,5,2040 +2,10,electricity,all-week,night,2025,all-year,R1,0.15560000000,windturbine,0,2040 +2,10,electricity,all-week,morning,2025,all-year,R1,0.23330000000,windturbine,1,2040 +2,10,electricity,all-week,afternoon,2025,all-year,R1,0.15560000000,windturbine,2,2040 +2,10,electricity,all-week,early-peak,2025,all-year,R1,0.23330000000,windturbine,3,2040 +2,10,electricity,all-week,late-peak,2025,all-year,R1,0.46670000000,windturbine,4,2040 +2,10,electricity,all-week,evening,2025,all-year,R1,0.31110000000,windturbine,5,2040 +3,10,electricity,all-week,night,2030,all-year,R1,0.31110000000,windturbine,0,2040 +3,10,electricity,all-week,morning,2030,all-year,R1,0.46670000000,windturbine,1,2040 +3,10,electricity,all-week,afternoon,2030,all-year,R1,0.31110000000,windturbine,2,2040 +3,10,electricity,all-week,early-peak,2030,all-year,R1,0.46670000000,windturbine,3,2040 +3,10,electricity,all-week,late-peak,2030,all-year,R1,0.93330000000,windturbine,4,2040 +3,10,electricity,all-week,evening,2030,all-year,R1,0.62220000000,windturbine,5,2040 
+4,10,electricity,all-week,night,2035,all-year,R1,0.31110000000,windturbine,0,2040 +4,10,electricity,all-week,morning,2035,all-year,R1,0.46670000000,windturbine,1,2040 +4,10,electricity,all-week,afternoon,2035,all-year,R1,0.31110000000,windturbine,2,2040 +4,10,electricity,all-week,early-peak,2035,all-year,R1,0.46670000000,windturbine,3,2040 +4,10,electricity,all-week,late-peak,2035,all-year,R1,0.93330000000,windturbine,4,2040 +4,10,electricity,all-week,evening,2035,all-year,R1,0.62220000000,windturbine,5,2040 +1,10,electricity,all-week,night,2020,all-year,R1,0.13330000000,windturbine,0,2045 +1,10,electricity,all-week,morning,2020,all-year,R1,0.20000000000,windturbine,1,2045 +1,10,electricity,all-week,afternoon,2020,all-year,R1,0.13330000000,windturbine,2,2045 +1,10,electricity,all-week,early-peak,2020,all-year,R1,0.20000000000,windturbine,3,2045 +1,10,electricity,all-week,late-peak,2020,all-year,R1,0.40000000000,windturbine,4,2045 +1,10,electricity,all-week,evening,2020,all-year,R1,0.26670000000,windturbine,5,2045 +2,10,electricity,all-week,night,2025,all-year,R1,0.13330000000,windturbine,0,2045 +2,10,electricity,all-week,morning,2025,all-year,R1,0.20000000000,windturbine,1,2045 +2,10,electricity,all-week,afternoon,2025,all-year,R1,0.13330000000,windturbine,2,2045 +2,10,electricity,all-week,early-peak,2025,all-year,R1,0.20000000000,windturbine,3,2045 +2,10,electricity,all-week,late-peak,2025,all-year,R1,0.40000000000,windturbine,4,2045 +2,10,electricity,all-week,evening,2025,all-year,R1,0.26670000000,windturbine,5,2045 +3,10,electricity,all-week,night,2030,all-year,R1,0.26670000000,windturbine,0,2045 +3,10,electricity,all-week,morning,2030,all-year,R1,0.40000000000,windturbine,1,2045 +3,10,electricity,all-week,afternoon,2030,all-year,R1,0.26670000000,windturbine,2,2045 +3,10,electricity,all-week,early-peak,2030,all-year,R1,0.40000000000,windturbine,3,2045 +3,10,electricity,all-week,late-peak,2030,all-year,R1,0.80000000000,windturbine,4,2045 
+3,10,electricity,all-week,evening,2030,all-year,R1,0.53330000000,windturbine,5,2045 +4,10,electricity,all-week,night,2035,all-year,R1,0.26670000000,windturbine,0,2045 +4,10,electricity,all-week,morning,2035,all-year,R1,0.40000000000,windturbine,1,2045 +4,10,electricity,all-week,afternoon,2035,all-year,R1,0.26670000000,windturbine,2,2045 +4,10,electricity,all-week,early-peak,2035,all-year,R1,0.40000000000,windturbine,3,2045 +4,10,electricity,all-week,late-peak,2035,all-year,R1,0.80000000000,windturbine,4,2045 +4,10,electricity,all-week,evening,2035,all-year,R1,0.53330000000,windturbine,5,2045 +5,10,electricity,all-week,night,2040,all-year,R1,0.26670000000,windturbine,0,2045 +5,10,electricity,all-week,morning,2040,all-year,R1,0.40000000000,windturbine,1,2045 +5,10,electricity,all-week,afternoon,2040,all-year,R1,0.26670000000,windturbine,2,2045 +5,10,electricity,all-week,early-peak,2040,all-year,R1,0.40000000000,windturbine,3,2045 +5,10,electricity,all-week,late-peak,2040,all-year,R1,0.80000000000,windturbine,4,2045 +5,10,electricity,all-week,evening,2040,all-year,R1,0.53330000000,windturbine,5,2045 +2,10,electricity,all-week,night,2025,all-year,R1,0.13330000000,windturbine,0,2050 +2,10,electricity,all-week,morning,2025,all-year,R1,0.20000000000,windturbine,1,2050 +2,10,electricity,all-week,afternoon,2025,all-year,R1,0.13330000000,windturbine,2,2050 +2,10,electricity,all-week,early-peak,2025,all-year,R1,0.20000000000,windturbine,3,2050 +2,10,electricity,all-week,late-peak,2025,all-year,R1,0.40000000000,windturbine,4,2050 +2,10,electricity,all-week,evening,2025,all-year,R1,0.26670000000,windturbine,5,2050 +3,10,electricity,all-week,night,2030,all-year,R1,0.26670000000,windturbine,0,2050 +3,10,electricity,all-week,morning,2030,all-year,R1,0.40000000000,windturbine,1,2050 +3,10,electricity,all-week,afternoon,2030,all-year,R1,0.26670000000,windturbine,2,2050 +3,10,electricity,all-week,early-peak,2030,all-year,R1,0.40000000000,windturbine,3,2050 
+3,10,electricity,all-week,late-peak,2030,all-year,R1,0.80000000000,windturbine,4,2050 +3,10,electricity,all-week,evening,2030,all-year,R1,0.53330000000,windturbine,5,2050 +4,10,electricity,all-week,night,2035,all-year,R1,0.26670000000,windturbine,0,2050 +4,10,electricity,all-week,morning,2035,all-year,R1,0.40000000000,windturbine,1,2050 +4,10,electricity,all-week,afternoon,2035,all-year,R1,0.26670000000,windturbine,2,2050 +4,10,electricity,all-week,early-peak,2035,all-year,R1,0.40000000000,windturbine,3,2050 +4,10,electricity,all-week,late-peak,2035,all-year,R1,0.80000000000,windturbine,4,2050 +4,10,electricity,all-week,evening,2035,all-year,R1,0.53330000000,windturbine,5,2050 +5,10,electricity,all-week,night,2040,all-year,R1,0.26670000000,windturbine,0,2050 +5,10,electricity,all-week,morning,2040,all-year,R1,0.40000000000,windturbine,1,2050 +5,10,electricity,all-week,afternoon,2040,all-year,R1,0.26670000000,windturbine,2,2050 +5,10,electricity,all-week,early-peak,2040,all-year,R1,0.40000000000,windturbine,3,2050 +5,10,electricity,all-week,late-peak,2040,all-year,R1,0.80000000000,windturbine,4,2050 +5,10,electricity,all-week,evening,2040,all-year,R1,0.53330000000,windturbine,5,2050 +6,10,electricity,all-week,night,2045,all-year,R1,0.26670000000,windturbine,0,2050 +6,10,electricity,all-week,morning,2045,all-year,R1,0.40000000000,windturbine,1,2050 +6,10,electricity,all-week,afternoon,2045,all-year,R1,0.26670000000,windturbine,2,2050 +6,10,electricity,all-week,early-peak,2045,all-year,R1,0.40000000000,windturbine,3,2050 +6,10,electricity,all-week,late-peak,2045,all-year,R1,0.80000000000,windturbine,4,2050 +6,10,electricity,all-week,evening,2045,all-year,R1,0.53330000000,windturbine,5,2050 From cc9d2370d1f95d495372f8c92a91d08a3dfd661c Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 14 Oct 2024 16:22:05 +0100 Subject: [PATCH 31/92] Fix test --- tests/test_demand_share.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git 
a/tests/test_demand_share.py b/tests/test_demand_share.py index 9661a180b..1d51a6104 100644 --- a/tests/test_demand_share.py +++ b/tests/test_demand_share.py @@ -345,6 +345,7 @@ def test_unmet_forecast_demand(technologies, coords, timeslice, stock_factory): asia_market = _matching_market(technologies, asia_stock, timeslice) usa_market = _matching_market(technologies, usa_stock, timeslice) market = xr.concat((asia_market, usa_market), dim="region") + current_year = market.year[0] # spoof some agents @dataclass @@ -357,7 +358,9 @@ class Agent: Agent(0.7 * usa_stock.squeeze("region")), Agent(asia_stock.squeeze("region")), ] - result = unmet_forecasted_demand(agents, market, technologies) + result = unmet_forecasted_demand( + agents, market, technologies, current_year=current_year, forecast=5 + ) assert set(result.dims) == set(market.consumption.dims) - {"year"} assert result.values == approx(0) @@ -367,7 +370,9 @@ class Agent: Agent(0.8 * usa_stock.squeeze("region")), Agent(1.1 * asia_stock.squeeze("region")), ] - result = unmet_forecasted_demand(agents, market, technologies) + result = unmet_forecasted_demand( + agents, market, technologies, current_year=current_year, forecast=5 + ) assert set(result.dims) == set(market.consumption.dims) - {"year"} assert result.values == approx(0) @@ -376,7 +381,9 @@ class Agent: Agent(0.5 * usa_stock.squeeze("region")), Agent(0.5 * asia_stock.squeeze("region")), ] - result = unmet_forecasted_demand(agents, market, technologies) + result = unmet_forecasted_demand( + agents, market, technologies, current_year=current_year, forecast=5 + ) comm_usage = technologies.comm_usage.sel(commodity=market.commodity) enduse = is_enduse(comm_usage) assert (result.commodity == comm_usage.commodity).all() From 89f8c618c787fa5abf8053c78c5644f59845a68f Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 16 Oct 2024 10:19:05 +0100 Subject: [PATCH 32/92] Carry changes from fix_supply_issue2 branch --- src/muse/constraints.py | 17 ++++------------- 
src/muse/costs.py | 10 ++-------- src/muse/demand_share.py | 8 ++------ src/muse/examples.py | 5 +---- src/muse/investments.py | 3 --- src/muse/outputs/mca.py | 5 +---- src/muse/sectors/sector.py | 3 --- 7 files changed, 10 insertions(+), 41 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 6a0b12bb6..66c9fedd4 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -745,7 +745,7 @@ def minimum_service( if "region" in search_space.coords and "region" in technologies.dims: kwargs["region"] = assets.region techs = ( - technologies[["fixed_outputs", "utilization_factor", "minimum_service_factor"]] + technologies[["fixed_outputs", "minimum_service_factor"]] .sel(**kwargs) .drop_vars("technology") ) @@ -817,25 +817,16 @@ def lp_costs(technologies: xr.Dataset, costs: xr.DataArray) -> xr.Dataset: assert "year" not in technologies.dims - ts_costs = convert_timeslice(costs) selection = dict( commodity=is_enduse(technologies.comm_usage), technology=technologies.technology.isin(costs.replacement), ) - if "region" in technologies.fixed_outputs.dims and "region" in ts_costs.coords: - selection["region"] = ts_costs.region + if "region" in technologies.fixed_outputs.dims and "region" in costs.coords: + selection["region"] = costs.region fouts = technologies.fixed_outputs.sel(selection).rename(technology="replacement") - # lpcosts.dims = Frozen({'asset': 2, - # 'replacement': 2, - # 'timeslice': 3, - # 'commodity': 1}) - # muse38: lpcosts.dims = Frozen({'asset': 2, , - # 'commodity': 1 - # 'replacement': 2, - # 'timeslice': 3}) - production = zeros_like(ts_costs * fouts) + production = zeros_like(costs * convert_timeslice(fouts)) for dim in production.dims: if isinstance(production.get_index(dim), pd.MultiIndex): production = drop_timeslice(production) diff --git a/src/muse/costs.py b/src/muse/costs.py index bb2e3493c..3091cf94e 100644 --- a/src/muse/costs.py +++ b/src/muse/costs.py @@ -364,17 +364,11 @@ def 
annual_levelized_cost_of_energy( rates = techs.interest_rate / (1 - (1 + techs.interest_rate) ** (-life)) annualized_capital_costs = ( - convert_timeslice( - techs.cap_par * rates, - ) - / techs.utilization_factor + convert_timeslice(techs.cap_par * rates) / techs.utilization_factor ) o_and_e_costs = ( - convert_timeslice( - (techs.fix_par + techs.var_par), - ) - / techs.utilization_factor + convert_timeslice(techs.fix_par + techs.var_par) / techs.utilization_factor ) fuel_costs = (techs.fixed_inputs * prices).sum("commodity") diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index 321899916..d8da1d096 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -433,19 +433,15 @@ def unmet_forecasted_demand( ) -> xr.DataArray: """Forecast demand that cannot be serviced by non-decommissioned current assets.""" from muse.commodities import is_enduse - from muse.timeslices import convert_timeslice from muse.utilities import reduce_assets year = current_year + forecast comm_usage = technologies.comm_usage.sel(commodity=market.commodity) smarket: xr.Dataset = market.where(is_enduse(comm_usage), 0).interp(year=year) capacity = reduce_assets([u.assets.capacity.interp(year=year) for u in agents]) - ts_capacity = cast( - xr.DataArray, - convert_timeslice(capacity), - ) + capa = cast(xr.DataArray, capacity) - result = unmet_demand(smarket, ts_capacity, technologies, production) + result = unmet_demand(smarket, capa, technologies, production) if "year" in result.dims: result = result.squeeze("year") return result diff --git a/src/muse/examples.py b/src/muse/examples.py index 236ac9f38..9f85c4b3d 100644 --- a/src/muse/examples.py +++ b/src/muse/examples.py @@ -241,7 +241,6 @@ def matching_market(sector: str, model: str = "default") -> xr.Dataset: from muse.examples import sector as load_sector from muse.quantities import consumption, maximum_production from muse.sectors import Sector - from muse.timeslices import convert_timeslice from muse.utilities 
import agent_concatenation loaded_sector = cast(Sector, load_sector(sector, model)) @@ -250,9 +249,7 @@ def matching_market(sector: str, model: str = "default") -> xr.Dataset: market = xr.Dataset() production = cast( xr.DataArray, - convert_timeslice( - maximum_production(loaded_sector.technologies, assets.capacity), - ), + maximum_production(loaded_sector.technologies, assets.capacity), ) market["supply"] = production.sum("asset") if "dst_region" in market.dims: diff --git a/src/muse/investments.py b/src/muse/investments.py index 6fed448a2..87ab26ce9 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -227,7 +227,6 @@ def adhoc_match_demand( ) -> xr.DataArray: from muse.demand_matching import demand_matching from muse.quantities import capacity_in_use, maximum_production - from muse.timeslices import convert_timeslice demand = next(c for c in constraints if c.name == "demand").b @@ -239,8 +238,6 @@ def adhoc_match_demand( technology=costs.replacement, commodity=demand.commodity, ).drop_vars("technology") - if "timeslice" in demand.dims and "timeslice" not in max_prod.dims: - max_prod = convert_timeslice(max_prod) # Push disabled techs to last rank. # Any production assigned to them by the demand-matching algorithm will be removed. 
diff --git a/src/muse/outputs/mca.py b/src/muse/outputs/mca.py index 128c00ea7..9322f2ebb 100644 --- a/src/muse/outputs/mca.py +++ b/src/muse/outputs/mca.py @@ -776,10 +776,7 @@ def sector_capital_costs( year=output_year, technology=capacity.technology, ) - result = data.cap_par * (capacity**data.cap_exp) - data_agent = convert_timeslice( - result, - ) + data_agent = convert_timeslice(data.cap_par * (capacity**data.cap_exp)) data_agent["agent"] = a.name data_agent["category"] = a.category data_agent["sector"] = getattr(sector, "name", "unnamed") diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index f9a9f70d0..ed1156c81 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -283,7 +283,6 @@ def market_variables(self, market: xr.Dataset, technologies: xr.Dataset) -> Any: from muse.commodities import is_pollutant from muse.costs import annual_levelized_cost_of_energy, supply_cost from muse.quantities import consumption - from muse.timeslices import convert_timeslice from muse.utilities import broadcast_techs years = market.year.values @@ -293,8 +292,6 @@ def market_variables(self, market: xr.Dataset, technologies: xr.Dataset) -> Any: supply = self.supply_prod( market=market, capacity=capacity, technologies=technologies ) - if "timeslice" in market.prices.dims and "timeslice" not in supply.dims: - supply = convert_timeslice(supply) # Calculate consumption consume = consumption(technologies, supply, market.prices) From 793aacf0de2460717ba8d5b6abb83f129ba6251d Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 16 Oct 2024 10:27:09 +0100 Subject: [PATCH 33/92] More benign changes --- src/muse/quantities.py | 25 ++++++++----------------- 1 file changed, 8 insertions(+), 17 deletions(-) diff --git a/src/muse/quantities.py b/src/muse/quantities.py index 12dd7e8f8..0b0891d5c 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -267,7 +267,6 @@ def consumption( are not given, then flexible consumption is *not* 
considered. """ from muse.commodities import is_enduse, is_fuel - from muse.timeslices import convert_timeslice from muse.utilities import filter_with_template params = filter_with_template( @@ -279,10 +278,6 @@ def consumption( comm_usage = technologies.comm_usage.sel(commodity=production.commodity) production = production.sel(commodity=is_enduse(comm_usage)).sum("commodity") - - if prices is not None and "timeslice" in prices.dims: - production = convert_timeslice(production) # type: ignore - params_fuels = is_fuel(params.comm_usage) consumption = production * params.fixed_inputs.where(params_fuels, 0) @@ -376,15 +371,12 @@ def demand_matched_production( """ from muse.costs import annual_levelized_cost_of_energy as ALCOE from muse.demand_matching import demand_matching - from muse.timeslices import convert_timeslice from muse.utilities import broadcast_techs technodata = cast(xr.Dataset, broadcast_techs(technologies, capacity)) cost = ALCOE(prices=prices, technologies=technodata, **filters) max_production = maximum_production(technodata, capacity, **filters) assert ("timeslice" in demand.dims) == ("timeslice" in cost.dims) - if "timeslice" in demand.dims and "timeslice" not in max_production.dims: - max_production = convert_timeslice(max_production) return demand_matching(demand, cost, max_production) @@ -523,12 +515,11 @@ def capacity_to_service_demand( technologies: xr.Dataset, ) -> xr.DataArray: """Minimum capacity required to fulfill the demand.""" - from muse.timeslices import TIMESLICE - - max_hours = TIMESLICE.max() / TIMESLICE.sum() - commodity_output = technologies.fixed_outputs.sel(commodity=demand.commodity) - max_demand = ( - demand.where(commodity_output > 0, 0) - / commodity_output.where(commodity_output > 0, 1) - ).max(("commodity", "timeslice")) - return max_demand / technologies.utilization_factor / max_hours + from muse.timeslices import convert_timeslice + + timeslice_outputs = ( + 
convert_timeslice(technologies.fixed_outputs.sel(commodity=demand.commodity)) + * technologies.utilization_factor + ) + capa_to_service_demand = demand / timeslice_outputs + return capa_to_service_demand.max(("commodity", "timeslice")) From 22c141e22acb9cf2674f666b7903d2cda6a36efd Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 16 Oct 2024 14:36:52 +0100 Subject: [PATCH 34/92] Fix incorrect convert_timeslice usage in tests --- src/muse/timeslices.py | 6 +++--- tests/test_constraints.py | 9 ++------- tests/test_costs.py | 11 +++++----- tests/test_demand_share.py | 16 ++++----------- tests/test_quantities.py | 41 ++++++++++---------------------------- tests/test_readers.py | 7 ------- tests/test_timeslices.py | 12 +++++------ tests/test_trade.py | 3 +-- 8 files changed, 31 insertions(+), 74 deletions(-) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 2a5f420dc..f790ab8e8 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -1,7 +1,7 @@ """Timeslice utility functions.""" __all__ = [ - "reference_timeslice", + "read_timeslices", "convert_timeslice", "drop_timeslice", "setup_module", @@ -63,7 +63,7 @@ """ -def reference_timeslice( +def read_timeslices( settings: Union[Mapping, str], level_names: Sequence[str] = ("month", "day", "hour"), name: str = "timeslice", @@ -144,7 +144,7 @@ def reference_timeslice( def setup_module(settings: Union[str, Mapping]): """Sets up module singletons.""" global TIMESLICE - TIMESLICE = reference_timeslice(settings) + TIMESLICE = read_timeslices(settings) @unique diff --git a/tests/test_constraints.py b/tests/test_constraints.py index 0aee1af5c..64e645883 100644 --- a/tests/test_constraints.py +++ b/tests/test_constraints.py @@ -70,17 +70,12 @@ def assets(residential): @fixture -def market_demand(assets, technologies, market): +def market_demand(assets, technologies): from muse.quantities import maximum_production - from muse.timeslices import QuantityType, convert_timeslice return 0.8 * 
maximum_production( technologies.interp(year=2025), - convert_timeslice( - assets.capacity.sel(year=2025).groupby("technology").sum("asset"), - market, - QuantityType.INTENSIVE, - ), + assets.capacity.sel(year=2025).groupby("technology").sum("asset"), ).rename(technology="asset") diff --git a/tests/test_costs.py b/tests/test_costs.py index 6d90066ee..715239102 100644 --- a/tests/test_costs.py +++ b/tests/test_costs.py @@ -18,14 +18,13 @@ def _capacity(technologies, demand_share): @fixture -def _production(technologies, _capacity, demand_share): - from muse.timeslices import QuantityType, convert_timeslice +def _production(technologies, _capacity): + from muse.timeslices import convert_timeslice production = ( - _capacity * technologies.fixed_outputs * technologies.utilization_factor - ) - production = convert_timeslice( - production, demand_share.timeslice, QuantityType.INTENSIVE + _capacity + * convert_timeslice(technologies.fixed_outputs) + * technologies.utilization_factor ) return production diff --git a/tests/test_demand_share.py b/tests/test_demand_share.py index 8e21b8cdf..65f8ddc02 100644 --- a/tests/test_demand_share.py +++ b/tests/test_demand_share.py @@ -14,19 +14,14 @@ def matching_market(technologies, stock, timeslice): ) -def _matching_market(technologies, stock, timeslice): +def _matching_market(technologies, stock): """A market which matches stocks exactly.""" from numpy.random import random from muse.quantities import consumption, maximum_production - from muse.timeslices import QuantityType, convert_timeslice market = xr.Dataset() - production = convert_timeslice( - maximum_production(technologies, stock.capacity), - timeslice, - QuantityType.INTENSIVE, - ) + production = maximum_production(technologies, stock.capacity) market["supply"] = production.sum("asset") market["consumption"] = drop_timeslice( consumption(technologies, production).sum("asset") + market.supply @@ -126,7 +121,6 @@ def test_new_retro_split_zero_new_unmet(technologies, stock, 
matching_market): def test_new_retro_accounting_identity(technologies, stock, market): from muse.demand_share import new_and_retro_demands from muse.production import factory - from muse.timeslices import QuantityType, convert_timeslice share = new_and_retro_demands( stock.capacity, market, technologies, current_year=2010, forecast=5 @@ -134,14 +128,12 @@ def test_new_retro_accounting_identity(technologies, stock, market): assert (share >= 0).all() production_method = factory() - serviced = convert_timeslice( + serviced = ( production_method( market.interp(year=2015), stock.capacity.interp(year=2015), technologies ) .groupby("region") - .sum("asset"), - market.timeslice, - QuantityType.INTENSIVE, + .sum("asset") ) consumption = market.consumption.interp(year=2015) diff --git a/tests/test_quantities.py b/tests/test_quantities.py index 0479e1a1f..27f22f7a1 100644 --- a/tests/test_quantities.py +++ b/tests/test_quantities.py @@ -403,7 +403,6 @@ def test_demand_matched_production( ): from muse.commodities import CommodityUsage, is_enduse from muse.quantities import demand_matched_production, maximum_production - from muse.timeslices import QuantityType, convert_timeslice # try and make sure we have a few more outputs than the default fixture technologies.comm_usage[:] = np.random.choice( @@ -414,11 +413,8 @@ def test_demand_matched_production( technologies.fixed_outputs[:] *= is_enduse(technologies.comm_usage) capacity = capacity.sel(year=capacity.year.min(), drop=True) - max_prod = convert_timeslice( - maximum_production(technologies, capacity), - demand.timeslice, - QuantityType.INTENSIVE, - ) + max_prod = maximum_production(technologies, capacity) + demand = max_prod.sum("asset") demand[:] *= np.random.choice([0, 1, 1 / 2, 1 / 3, 1 / 10], demand.shape) prices = xr.zeros_like(demand) @@ -434,7 +430,6 @@ def test_costed_production_exact_match(market, capacity, technologies): costed_production, maximum_production, ) - from muse.timeslices import QuantityType, 
convert_timeslice from muse.utilities import broadcast_techs if set(capacity.region.values) != set(market.region.values): @@ -445,13 +440,11 @@ def test_costed_production_exact_match(market, capacity, technologies): costs = annual_levelized_cost_of_energy( prices=market.prices.sel(region=technodata.region), technologies=technodata ) - maxdemand = convert_timeslice( + maxdemand = ( xr.Dataset(dict(mp=maximum_production(technologies, capacity))) .groupby("region") .sum("asset") - .mp, - market, - QuantityType.INTENSIVE, + .mp ) market["consumption"] = drop_timeslice(maxdemand) result = costed_production(market.consumption, costs, capacity, technologies) @@ -469,17 +462,12 @@ def test_costed_production_single_region(market, capacity, technologies): costed_production, maximum_production, ) - from muse.timeslices import QuantityType, convert_timeslice from muse.utilities import broadcast_techs capacity = capacity.drop_vars("region") capacity["region"] = "USA" market = market.sel(region=[capacity.region.values]) - maxdemand = convert_timeslice( - maximum_production(technologies, capacity).sum("asset"), - market, - QuantityType.INTENSIVE, - ) + maxdemand = maximum_production(technologies, capacity).sum("asset") market["consumption"] = drop_timeslice(0.9 * maxdemand) technodata = broadcast_techs(technologies, capacity) costs = annual_levelized_cost_of_energy( @@ -500,18 +488,15 @@ def test_costed_production_single_year(market, capacity, technologies): costed_production, maximum_production, ) - from muse.timeslices import QuantityType, convert_timeslice from muse.utilities import broadcast_techs capacity = capacity.sel(year=2010) market = market.sel(year=2010) - maxdemand = convert_timeslice( + maxdemand = ( xr.Dataset(dict(mp=maximum_production(technologies, capacity))) .groupby("region") .sum("asset") - .mp, - market, - QuantityType.INTENSIVE, + .mp ) market["consumption"] = drop_timeslice(0.9 * maxdemand) technodata = broadcast_techs(technologies, capacity) @@ -533,7 
+518,6 @@ def test_costed_production_over_capacity(market, capacity, technologies): costed_production, maximum_production, ) - from muse.timeslices import QuantityType, convert_timeslice from muse.utilities import broadcast_techs capacity = capacity.isel(asset=[0, 1, 2]) @@ -541,13 +525,11 @@ def test_costed_production_over_capacity(market, capacity, technologies): capacity.region.values[: len(set(market.region.values))] = list( set(market.region.values) ) - maxdemand = convert_timeslice( + maxdemand = ( xr.Dataset(dict(mp=maximum_production(technologies, capacity))) .groupby("region") .sum("asset") - .mp, - market, - QuantityType.INTENSIVE, + .mp ) market["consumption"] = drop_timeslice(maxdemand * 0.9) technodata = broadcast_techs(technologies, capacity) @@ -569,7 +551,6 @@ def test_costed_production_with_minimum_service(market, capacity, technologies, costed_production, maximum_production, ) - from muse.timeslices import QuantityType, convert_timeslice from muse.utilities import broadcast_techs if set(capacity.region.values) != set(market.region.values): @@ -580,9 +561,7 @@ def test_costed_production_with_minimum_service(market, capacity, technologies, technologies.utilization_factor.dims, rng.uniform(low=0.5, high=0.9, size=technologies.utilization_factor.shape), ) - maxprod = convert_timeslice( - maximum_production(technologies, capacity), market, QuantityType.INTENSIVE - ) + maxprod = maximum_production(technologies, capacity) minprod = maxprod * broadcast_techs(technologies.minimum_service_factor, maxprod) maxdemand = xr.Dataset(dict(mp=minprod)).groupby("region").sum("asset").mp market["consumption"] = drop_timeslice(maxdemand * 0.9) diff --git a/tests/test_readers.py b/tests/test_readers.py index f98d2b6ae..69ff7ce7b 100644 --- a/tests/test_readers.py +++ b/tests/test_readers.py @@ -134,13 +134,6 @@ def test_check_foresight(settings: dict): check_foresight(settings) -def test_check_time_slices(settings: dict): - """Tests the check_budget_parameters 
function.""" - from muse.readers.toml import check_time_slices - - check_time_slices(settings) - - def test_check_global_data_files(settings: dict, user_data_files): """Tests the check_global_data_files function.""" from muse.readers.toml import check_global_data_files diff --git a/tests/test_timeslices.py b/tests/test_timeslices.py index c8de264dc..a704969e0 100644 --- a/tests/test_timeslices.py +++ b/tests/test_timeslices.py @@ -26,9 +26,9 @@ def toml(): @fixture def reference(toml): - from muse.timeslices import reference_timeslice + from muse.timeslices import read_timeslices - return reference_timeslice(toml) + return read_timeslices(toml) @fixture @@ -54,7 +54,7 @@ def timeslice_dataarray(reference): def test_reference_timeslice(): from toml import loads - from muse.timeslices import reference_timeslice + from muse.timeslices import read_timeslices inputs = loads( """ @@ -80,7 +80,7 @@ def test_reference_timeslice(): """ ) - ts = reference_timeslice(inputs) + ts = read_timeslices(inputs) assert isinstance(ts, DataArray) assert "timeslice" in ts.coords @@ -88,10 +88,10 @@ def test_reference_timeslice(): def test_no_overlap(): from pytest import raises - from muse.timeslices import reference_timeslice + from muse.timeslices import read_timeslices with raises(ValueError): - reference_timeslice( + read_timeslices( """ [timeslices] winter.weekday.night = 396 diff --git a/tests/test_trade.py b/tests/test_trade.py index bafa07db9..2398b47d9 100644 --- a/tests/test_trade.py +++ b/tests/test_trade.py @@ -102,14 +102,13 @@ def test_lp_costs(): technologies = examples.technodata("power", model="trade") search_space = examples.search_space("power", model="trade") - timeslices = examples.sector("power", model="trade").timeslices costs = ( search_space * np.arange(np.prod(search_space.shape)).reshape(search_space.shape) * xr.ones_like(technologies.dst_region) ) - lpcosts = lp_costs(technologies.sel(year=2020, drop=True), costs, timeslices) + lpcosts = 
lp_costs(technologies.sel(year=2020, drop=True), costs) assert "capacity" in lpcosts.data_vars assert "production" in lpcosts.data_vars assert set(lpcosts.capacity.dims) == {"agent", "replacement", "dst_region"} From a59580cc7144bf5add7f2dab92d392471502110e Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 16 Oct 2024 14:42:42 +0100 Subject: [PATCH 35/92] Fix timeslice import in tests --- tests/conftest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index efc0e38f8..698b8db20 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -169,9 +169,9 @@ def default_timeslice_globals(save_timeslice_globals): @fixture def timeslice(default_timeslice_globals) -> Dataset: - from muse.readers.toml import read_timeslices + from muse.timeslices import TIMESLICE - return read_timeslices(dict(hour=["all-day"])) + return TIMESLICE @fixture From 993af9f9b4766b6f798aaf158173025d430ec541 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 16 Oct 2024 15:02:09 +0100 Subject: [PATCH 36/92] Delete unused fixture --- tests/conftest.py | 27 +-------------------------- 1 file changed, 1 insertion(+), 26 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 698b8db20..b73ade597 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -152,16 +152,7 @@ def pytest_collection_modifyitems(config, items): @fixture -def save_timeslice_globals(): - from muse import timeslices - - old = timeslices.TIMESLICE, timeslices.TRANSFORMS - yield - timeslices.TIMESLICE, timeslices.TRANSFORMS = old - - -@fixture -def default_timeslice_globals(save_timeslice_globals): +def default_timeslice_globals(): from muse import timeslices timeslices.setup_module(timeslices.DEFAULT_TIMESLICE_DESCRIPTION) @@ -174,22 +165,6 @@ def timeslice(default_timeslice_globals) -> Dataset: return TIMESLICE -@fixture -def other_timeslice() -> Dataset: - from pandas import MultiIndex - - months = ["winter", "spring-autumn", "summer"] - days = 
["all-week", "all-week", "all-week"] - hour = ["all-day", "all-day", "all-day"] - coordinates = MultiIndex.from_arrays( - [months, days, hour], names=("month", "day", "hour") - ) - result = Dataset(coords={"timeslice": coordinates}) - result["represent_hours"] = ("timeslice", [2920, 2920, 2920]) - result = result.set_coords("represent_hours") - return result - - @fixture def coords() -> Mapping: """Technoeconomics coordinates.""" From 908872a8c95d795521ad5f1da38c91f28138131b Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 16 Oct 2024 15:11:33 +0100 Subject: [PATCH 37/92] Fix market fixtures --- tests/conftest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index b73ade597..703f025a9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -303,7 +303,7 @@ def var(*dims, factor=100.0): def agent_market(coords, technologies, timeslice) -> Dataset: from numpy.random import rand - result = timeslice.copy() + result = Dataset(coords=timeslice.coords) result["commodity"] = "commodity", coords["commodity"] result["region"] = "region", coords["region"] result["technology"] = "technology", coords["technology"] @@ -325,7 +325,7 @@ def var(*dims, factor=100.0): def market(coords, technologies, timeslice) -> Dataset: from numpy.random import rand - result = timeslice.copy() + result = Dataset(coords=timeslice.coords) result["commodity"] = "commodity", coords["commodity"] result["region"] = "region", coords["region"] result["year"] = "year", coords["year"] From e0a8c3a1a31c88a9a1ac004e14a0def3df6af382 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 16 Oct 2024 15:23:34 +0100 Subject: [PATCH 38/92] More test fixes --- tests/conftest.py | 1 - tests/test_demand_share.py | 12 ++++++------ tests/test_fullsim_regression.py | 2 -- tests/test_quantities.py | 9 ++------- 4 files changed, 8 insertions(+), 16 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 703f025a9..8dc90a576 100644 --- 
a/tests/conftest.py +++ b/tests/conftest.py @@ -490,7 +490,6 @@ def demand_share(coords, timeslice): } shape = len(axes["commodity"]), len(axes["asset"]), len(axes["timeslice"]) result = DataArray(rand(*shape), coords=axes, dims=axes.keys(), name="demand_share") - result.coords["represent_hours"] = timeslice.represent_hours return result diff --git a/tests/test_demand_share.py b/tests/test_demand_share.py index 65f8ddc02..b2beee5d5 100644 --- a/tests/test_demand_share.py +++ b/tests/test_demand_share.py @@ -227,8 +227,8 @@ def test_new_retro_demand_share(technologies, coords, market, timeslice, stock_f asia_stock = stock_factory(coords, technologies).expand_dims(region=["ASEAN"]) usa_stock = stock_factory(coords, technologies).expand_dims(region=["USA"]) - asia_market = _matching_market(technologies, asia_stock, timeslice) - usa_market = _matching_market(technologies, usa_stock, timeslice) + asia_market = _matching_market(technologies, asia_stock) + usa_market = _matching_market(technologies, usa_stock) market = xr.concat((asia_market, usa_market), dim="region") market.consumption.loc[{"year": 2031}] *= 2 @@ -281,8 +281,8 @@ def test_standard_demand_share(technologies, coords, market, timeslice, stock_fa asia_stock = stock_factory(coords, technologies).expand_dims(region=["ASEAN"]) usa_stock = stock_factory(coords, technologies).expand_dims(region=["USA"]) - asia_market = _matching_market(technologies, asia_stock, timeslice) - usa_market = _matching_market(technologies, usa_stock, timeslice) + asia_market = _matching_market(technologies, asia_stock) + usa_market = _matching_market(technologies, usa_stock) market = xr.concat((asia_market, usa_market), dim="region") market.consumption.loc[{"year": 2031}] *= 2 @@ -334,8 +334,8 @@ def test_unmet_forecast_demand(technologies, coords, timeslice, stock_factory): asia_stock = stock_factory(coords, technologies).expand_dims(region=["ASEAN"]) usa_stock = stock_factory(coords, technologies).expand_dims(region=["USA"]) - 
asia_market = _matching_market(technologies, asia_stock, timeslice) - usa_market = _matching_market(technologies, usa_stock, timeslice) + asia_market = _matching_market(technologies, asia_stock) + usa_market = _matching_market(technologies, usa_stock) market = xr.concat((asia_market, usa_market), dim="region") # spoof some agents diff --git a/tests/test_fullsim_regression.py b/tests/test_fullsim_regression.py index 973d36ff6..d508b7447 100644 --- a/tests/test_fullsim_regression.py +++ b/tests/test_fullsim_regression.py @@ -5,7 +5,6 @@ from muse.examples import available_examples -@mark.usefixtures("save_timeslice_globals") @mark.regression @mark.example @mark.parametrize("model", available_examples()) @@ -40,7 +39,6 @@ def available_tutorials(): return [d.parent for d in base_path.rglob("*/input") if d.is_dir()] -@mark.usefixtures("save_timeslice_globals") @mark.regression @mark.tutorial @mark.parametrize("tutorial_path", available_tutorials()) diff --git a/tests/test_quantities.py b/tests/test_quantities.py index 27f22f7a1..67c1d904e 100644 --- a/tests/test_quantities.py +++ b/tests/test_quantities.py @@ -89,6 +89,7 @@ def test_supply_emissions(technologies, capacity): def test_gross_margin(technologies, capacity, market): from muse.commodities import is_enduse, is_fuel, is_pollutant from muse.quantities import gross_margin + from muse.timeslices import convert_timeslice """ Gross margin refers to the calculation @@ -118,12 +119,7 @@ def test_gross_margin(technologies, capacity, market): revenues = prices * prod * sum(is_enduse(usage)) env_costs = env_prices * envs * sum(is_pollutant(usage)) cons_costs = prices * fuels * sum(is_fuel(usage)) - var_costs = ( - vp - * ((prod * sum(is_enduse(usage))) ** ve) - * market.represent_hours - / sum(market.represent_hours) - ) + var_costs = convert_timeslice(vp * ((prod * sum(is_enduse(usage))) ** ve)) expected = revenues - env_costs - cons_costs - var_costs expected *= 100 / revenues @@ -177,7 +173,6 @@ def 
test_consumption_no_flex(technologies, production, market): technologies.flexible_inputs[:] = 0 actual = consumption(technologies, production, market.prices) - expected = expected * market.represent_hours / market.represent_hours.sum() actual, expected = xr.broadcast(actual, expected) assert actual.values == approx(expected.values) From 803480717917da2520e8efafdc5ebed176c76b3f Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 16 Oct 2024 15:47:45 +0100 Subject: [PATCH 39/92] Fix a fixture --- tests/test_demand_share.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_demand_share.py b/tests/test_demand_share.py index b2beee5d5..17e73b248 100644 --- a/tests/test_demand_share.py +++ b/tests/test_demand_share.py @@ -8,7 +8,7 @@ def matching_market(technologies, stock, timeslice): """A market which matches stocks exactly.""" return ( - _matching_market(technologies, stock, timeslice) + _matching_market(technologies, stock) .interp(year=[2010, 2015, 2020, 2025]) .transpose("timeslice", "region", "commodity", "year") ) From 1bd7c84493b0838e097820845d0394241dc728a9 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 16 Oct 2024 16:26:39 +0100 Subject: [PATCH 40/92] Move default timeslice settings to conftest --- src/muse/timeslices.py | 45 --------------------------------------- tests/conftest.py | 46 ++++++++++++++++++++++++++++++++++++++-- tests/test_quantities.py | 2 +- tests/test_readers.py | 10 ++++----- 4 files changed, 49 insertions(+), 54 deletions(-) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index f790ab8e8..8f324bfb7 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -20,48 +20,6 @@ TRANSFORMS: dict[tuple, ndarray] = None # type: ignore """Transforms from each aggregate to the finest timeslice.""" -DEFAULT_TIMESLICE_DESCRIPTION = """ - [timeslices] - winter.weekday.night = 396 - winter.weekday.morning = 396 - winter.weekday.afternoon = 264 - winter.weekday.early-peak = 66 - 
winter.weekday.late-peak = 66 - winter.weekday.evening = 396 - winter.weekend.night = 156 - winter.weekend.morning = 156 - winter.weekend.afternoon = 156 - winter.weekend.evening = 156 - spring-autumn.weekday.night = 792 - spring-autumn.weekday.morning = 792 - spring-autumn.weekday.afternoon = 528 - spring-autumn.weekday.early-peak = 132 - spring-autumn.weekday.late-peak = 132 - spring-autumn.weekday.evening = 792 - spring-autumn.weekend.night = 300 - spring-autumn.weekend.morning = 300 - spring-autumn.weekend.afternoon = 300 - spring-autumn.weekend.evening = 300 - summer.weekday.night = 396 - summer.weekday.morning = 396 - summer.weekday.afternoon = 264 - summer.weekday.early-peak = 66 - summer.weekday.late-peak = 66 - summer.weekday.evening = 396 - summer.weekend.night = 150 - summer.weekend.morning = 150 - summer.weekend.afternoon = 150 - summer.weekend.evening = 150 - level_names = ["month", "day", "hour"] - - [timeslices.aggregates] - all-day = [ - "night", "morning", "afternoon", "early-peak", "late-peak", "evening", "night" - ] - all-week = ["weekday", "weekend"] - all-year = ["winter", "summer", "spring-autumn"] - """ - def read_timeslices( settings: Union[Mapping, str], @@ -195,6 +153,3 @@ def drop_timeslice(data: DataArray) -> DataArray: return data return data.drop_vars(data.timeslice.indexes) - - -setup_module(DEFAULT_TIMESLICE_DESCRIPTION) diff --git a/tests/conftest.py b/tests/conftest.py index 8dc90a576..9984599a5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -153,9 +153,51 @@ def pytest_collection_modifyitems(config, items): @fixture def default_timeslice_globals(): - from muse import timeslices + from muse.timeslices import setup_module + + default_timeslices = """ + [timeslices] + winter.weekday.night = 396 + winter.weekday.morning = 396 + winter.weekday.afternoon = 264 + winter.weekday.early-peak = 66 + winter.weekday.late-peak = 66 + winter.weekday.evening = 396 + winter.weekend.night = 156 + winter.weekend.morning = 156 + 
winter.weekend.afternoon = 156 + winter.weekend.evening = 156 + spring-autumn.weekday.night = 792 + spring-autumn.weekday.morning = 792 + spring-autumn.weekday.afternoon = 528 + spring-autumn.weekday.early-peak = 132 + spring-autumn.weekday.late-peak = 132 + spring-autumn.weekday.evening = 792 + spring-autumn.weekend.night = 300 + spring-autumn.weekend.morning = 300 + spring-autumn.weekend.afternoon = 300 + spring-autumn.weekend.evening = 300 + summer.weekday.night = 396 + summer.weekday.morning = 396 + summer.weekday.afternoon = 264 + summer.weekday.early-peak = 66 + summer.weekday.late-peak = 66 + summer.weekday.evening = 396 + summer.weekend.night = 150 + summer.weekend.morning = 150 + summer.weekend.afternoon = 150 + summer.weekend.evening = 150 + level_names = ["month", "day", "hour"] + + [timeslices.aggregates] + all-day = [ + "night", "morning", "afternoon", "early-peak", "late-peak", "evening", "night" + ] + all-week = ["weekday", "weekend"] + all-year = ["winter", "summer", "spring-autumn"] + """ - timeslices.setup_module(timeslices.DEFAULT_TIMESLICE_DESCRIPTION) + setup_module(default_timeslices) @fixture diff --git a/tests/test_quantities.py b/tests/test_quantities.py index 67c1d904e..0c6410f2b 100644 --- a/tests/test_quantities.py +++ b/tests/test_quantities.py @@ -139,7 +139,7 @@ def test_decommissioning_demand(technologies, capacity): technologies.fixed_outputs[:] = fouts = 0.5 technologies.utilization_factor[:] = ufac = 0.4 decom = decommissioning_demand(technologies, capacity, years) - assert set(decom.dims) == {"asset", "commodity", "year"} + assert set(decom.dims) == {"asset", "commodity", "year", "timeslice"} assert decom.sel(commodity=is_enduse(technologies.comm_usage)).values == approx( ufac * fouts * (current - forecast) ) diff --git a/tests/test_readers.py b/tests/test_readers.py index 69ff7ce7b..16e36f3bb 100644 --- a/tests/test_readers.py +++ b/tests/test_readers.py @@ -489,12 +489,12 @@ def test_read_technodata_timeslices(tmp_path): 
month_values = ["all-year"] * 6 day_values = ["all-week"] * 6 hour_values = [ + "night", + "morning", "afternoon", "early-peak", - "evening", "late-peak", - "morning", - "night", + "evening", ] assert list(data.coords["timeslice"].values) == list( @@ -598,11 +598,9 @@ def test_read_csv_agent_parameters(default_model): def test_read_initial_market(default_model): from muse.readers.csv import read_initial_market - from muse.readers.toml import read_settings - settings = read_settings(default_model / "settings.toml") path = default_model / "input" / "Projections.csv" - data = read_initial_market(path, timeslices=settings.timeslices) + data = read_initial_market(path) assert isinstance(data, xr.Dataset) assert set(data.dims) == {"region", "year", "commodity", "timeslice"} From c616694b850c24004715b10b7fbb4a5e65e50f20 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 17 Oct 2024 09:43:02 +0100 Subject: [PATCH 41/92] Fix docstring tests --- src/muse/constraints.py | 12 +++--------- src/muse/demand_share.py | 2 +- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 66c9fedd4..2514049e6 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -772,7 +772,6 @@ def lp_costs(technologies: xr.Dataset, costs: xr.DataArray) -> xr.Dataset: >>> from muse import examples >>> technologies = examples.technodata("residential", model="medium") >>> search_space = examples.search_space("residential", model="medium") - >>> timeslices = examples.sector("residential", model="medium").timeslices >>> costs = ( ... search_space ... * np.arange(np.prod(search_space.shape)).reshape(search_space.shape) @@ -808,7 +807,7 @@ def lp_costs(technologies: xr.Dataset, costs: xr.DataArray) -> xr.Dataset: which production occurs and the ``commodity`` produced. 
>>> lpcosts.production.dims - ('timeslice', 'asset', 'replacement', 'commodity') + ('asset', 'replacement', 'timeslice', 'commodity') """ from xarray import zeros_like @@ -956,7 +955,6 @@ def lp_constraint_matrix( ... .sel(region=assets.region) ... ), ... costs=search * np.arange(np.prod(search.shape)).reshape(search.shape), - ... timeslices=market.timeslice, ... ) For a simple example, we can first check the case where b is scalar. The result @@ -1076,7 +1074,6 @@ class ScipyAdapter: >>> from muse import examples >>> from muse.quantities import maximum_production - >>> from muse.timeslices import convert_timeslice >>> from muse import constraints as cs >>> res = examples.sector("residential", model="medium") >>> market = examples.residential_market("medium") @@ -1084,10 +1081,7 @@ class ScipyAdapter: >>> assets = next(a.assets for a in res.agents) >>> market_demand = 0.8 * maximum_production( ... res.technologies.interp(year=2025), - ... convert_timeslice( - ... assets.capacity.sel(year=2025).groupby("technology").sum("asset"), - ... market.timeslice, - ... ), + ... assets.capacity.sel(year=2025).groupby("technology").sum("asset"), ... ).rename(technology="asset") >>> costs = search * np.arange(np.prod(search.shape)).reshape(search.shape) >>> constraint = cs.max_capacity_expansion( @@ -1123,7 +1117,7 @@ class ScipyAdapter: >>> technologies = res.technologies.interp(year=market.year.min() + 5) >>> inputs = cs.ScipyAdapter.factory( - ... technologies, costs, market.timeslice, constraint + ... technologies, costs, constraint ... ) The decision variables are always constrained between zero and infinity: diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index d8da1d096..cad269a65 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -145,7 +145,7 @@ def new_and_retro( A_{a, s}^r = w_s\sum_i A_a^{r, i} with :math:`w_s` a weight associated with each timeslice and determined via - :py:func:`muse.timeslices.convert_timeslice_new`. 
+ :py:func:`muse.timeslices.convert_timeslice`. #. An intermediate quantity, the :py:func:`unmet demand ` :math:`U` is defined from From d054a3ba9222f602f274c9886dce59975a14926f Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 17 Oct 2024 15:09:31 +0100 Subject: [PATCH 42/92] A few more tiny changes (e.g. typing) --- src/muse/agents/agent.py | 12 ++++++------ src/muse/demand_share.py | 3 +-- src/muse/sectors/sector.py | 4 +--- src/muse/sectors/subsector.py | 2 +- 4 files changed, 9 insertions(+), 12 deletions(-) diff --git a/src/muse/agents/agent.py b/src/muse/agents/agent.py index 836adbab6..7e69d463a 100644 --- a/src/muse/agents/agent.py +++ b/src/muse/agents/agent.py @@ -81,7 +81,7 @@ def next( market: xr.Dataset, demand: xr.DataArray, time_period: int, - ): + ) -> None: """Increments agent to the next time point (e.g. performing investments).""" def __repr__(self): @@ -243,7 +243,7 @@ def next( market: xr.Dataset, demand: xr.DataArray, time_period: int, - ): + ) -> None: self.year += time_period def compute_decision( @@ -251,8 +251,8 @@ def compute_decision( technologies: xr.Dataset, market: xr.Dataset, demand: xr.DataArray, - search_space, - ): + search_space: xr.DataArray, + ) -> xr.DataArray: # Filter technologies according to the search space, forecast year and region techs = self.filter_input( technologies, @@ -319,7 +319,7 @@ def next( market: xr.Dataset, demand: xr.DataArray, time_period: int, - ): + ) -> None: """Iterates agent one turn. 
The goal is to figure out from market variables which technologies to @@ -399,7 +399,7 @@ def add_investments( investments: xr.DataArray, current_year: int, time_period: int, - ): + ) -> None: """Add new assets to the agent.""" # Calculate retirement profile of new assets new_capacity = self.retirement_profile( diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index 9c7e42120..69fcca891 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -484,7 +484,7 @@ def _inner_split( # Calculates the demand divided by the number of assets times the number of agents # if the demand is bigger than zero and the total demand assigned with the "method" - # function is zero (i.e. no decrease in production). + # function is zero. unassigned = (demand / (len(shares) * len(summed_shares))).where( logical_and(demand > 1e-12, total <= 1e-12), 0 ) @@ -583,7 +583,6 @@ def new_consumption( ) assert isinstance(ts_capa, xr.DataArray) - # missing = unmet_demand(current, ts_capa, technologies) consumption = minimum(delta, missing) return consumption diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index d5c2b517c..8d0f082ff 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -71,7 +71,6 @@ def factory(cls, name: str, settings: Any) -> Sector: # Create outputs outputs = ofactory(*sector_settings.pop("outputs", []), sector_name=name) - # supply_args = sector_settings.pop( "supply", sector_settings.pop("dispatch_production", {}) ) @@ -171,8 +170,7 @@ def __init__( def forecast(self): """Maximum forecast horizon across agents. - If no agents with a "forecast" attribute are found, defaults to 5. It cannot be - lower than 1 year. + It cannot be lower than 1 year. 
""" forecasts = [getattr(agent, "forecast") for agent in self.agents] return max(1, max(forecasts)) diff --git a/src/muse/sectors/subsector.py b/src/muse/sectors/subsector.py index 3beb7efa9..edf82a191 100644 --- a/src/muse/sectors/subsector.py +++ b/src/muse/sectors/subsector.py @@ -87,7 +87,7 @@ def aggregate_lp( market: xr.Dataset, time_period, current_year, - ): + ) -> None: from muse.utilities import agent_concatenation, reduce_assets # Split demand across agents From 0c84ba9feed0040ee38092ee688bf1f2a12b574c Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 17 Oct 2024 15:12:34 +0100 Subject: [PATCH 43/92] Remove inline comment --- src/muse/demand_share.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index 69fcca891..eeae9caf6 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -489,7 +489,6 @@ def _inner_split( logical_and(demand > 1e-12, total <= 1e-12), 0 ) - # ??? totals = { key: (share / share.sum("asset")).fillna(0) for key, share in shares.items() } From a02088e0fc1f59c039469183d426727aba9790c4 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 21 Oct 2024 15:09:41 +0100 Subject: [PATCH 44/92] Small changes --- src/muse/constraints.py | 1 - src/muse/demand_share.py | 3 +++ src/muse/readers/csv.py | 4 ++-- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index e783afc13..51f931925 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -466,7 +466,6 @@ def max_production( .drop_vars("technology") ) capacity = convert_timeslice(techs.fixed_outputs) * techs.utilization_factor - if "asset" not in capacity.dims and "asset" in search_space.dims: capacity = capacity.expand_dims(asset=search_space.asset) production = ones_like(capacity) diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index 5ddefcfdb..af81fccb6 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -602,8 
+602,11 @@ def new_and_retro_demands( # Interpolate market to forecast year smarket: xr.Dataset = market.interp(year=[current_year, current_year + forecast]) + + # Interpolate capacity to forecast year capa = capacity.interp(year=[current_year, current_year + forecast]) assert isinstance(capa, xr.DataArray) + if hasattr(capa, "region") and capa.region.dims == (): capa["region"] = "asset", [str(capa.region.values)] * len(capa.asset) diff --git a/src/muse/readers/csv.py b/src/muse/readers/csv.py index 0a0a08404..f159d61c7 100644 --- a/src/muse/readers/csv.py +++ b/src/muse/readers/csv.py @@ -136,7 +136,7 @@ def to_agent_share(name): def read_technodata_timeslices(filename: Union[str, Path]) -> xr.Dataset: from muse.readers import camel_to_snake - from muse.timeslices import TIMESLICE, convert_timeslice + from muse.timeslices import convert_timeslice csv = pd.read_csv(filename, float_precision="high", low_memory=False) csv = csv.rename(columns=camel_to_snake) @@ -170,7 +170,7 @@ def read_technodata_timeslices(filename: Union[str, Path]) -> xr.Dataset: if item not in ["technology", "region", "year"] ] result = result.stack(timeslice=timeslice_levels) - result = convert_timeslice(result, TIMESLICE) + result = convert_timeslice(result) # sorts timeslices into the correct order return result From 4785412adff6da4e706685cc41b9fcb8269f63f2 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Tue, 22 Oct 2024 09:03:34 +0100 Subject: [PATCH 45/92] Fix constraints tests --- tests/test_constraints.py | 42 ++++++++++++++------------------------- 1 file changed, 15 insertions(+), 27 deletions(-) diff --git a/tests/test_constraints.py b/tests/test_constraints.py index 64e645883..3681821cf 100644 --- a/tests/test_constraints.py +++ b/tests/test_constraints.py @@ -20,14 +20,6 @@ def residential(model): return examples.sector("residential", model=model) -@fixture(params=["timeslice_as_list", "timeslice_as_multindex"]) -def timeslices(market, request): - timeslice = market.timeslice - if 
request.param == "timeslice_as_multindex": - timeslice = _as_list(timeslice) - return timeslice - - @fixture def technologies(residential): return residential.technologies.squeeze("region") @@ -206,12 +198,12 @@ def test_lp_constraint(constraint, lpcosts): assert result.b.values == approx(0) -def test_to_scipy_adapter_maxprod(technologies, costs, max_production, timeslices): +def test_to_scipy_adapter_maxprod(technologies, costs, max_production): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) - adapter = ScipyAdapter.factory(technologies, costs, timeslices, max_production) + adapter = ScipyAdapter.factory(technologies, costs, max_production) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) assert adapter.A_eq is None @@ -231,12 +223,12 @@ def test_to_scipy_adapter_maxprod(technologies, costs, max_production, timeslice assert adapter.A_ub[:, capsize:] == approx(np.eye(prodsize)) -def test_to_scipy_adapter_demand(technologies, costs, demand_constraint, timeslices): +def test_to_scipy_adapter_demand(technologies, costs, demand_constraint): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) - adapter = ScipyAdapter.factory(technologies, costs, timeslices, demand_constraint) + adapter = ScipyAdapter.factory(technologies, costs, demand_constraint) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) assert adapter.A_ub is not None @@ -263,15 +255,13 @@ def test_to_scipy_adapter_demand(technologies, costs, demand_constraint, timesli def test_to_scipy_adapter_max_capacity_expansion( - technologies, costs, max_capacity_expansion, timeslices + technologies, costs, max_capacity_expansion ): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) - adapter = ScipyAdapter.factory( - technologies, costs, timeslices, 
max_capacity_expansion - ) + adapter = ScipyAdapter.factory(technologies, costs, max_capacity_expansion) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) assert adapter.A_ub is not None @@ -295,12 +285,12 @@ def test_to_scipy_adapter_max_capacity_expansion( assert set(adapter.A_ub[:, :capsize].flatten()) == {0.0, 1.0} -def test_to_scipy_adapter_no_constraint(technologies, costs, timeslices): +def test_to_scipy_adapter_no_constraint(technologies, costs): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) - adapter = ScipyAdapter.factory(technologies, costs, timeslices) + adapter = ScipyAdapter.factory(technologies, costs) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) assert adapter.A_ub is None @@ -315,7 +305,7 @@ def test_to_scipy_adapter_no_constraint(technologies, costs, timeslices): assert adapter.c.size == capsize + prodsize -def test_back_to_muse_capacity(technologies, costs, timeslices): +def test_back_to_muse_capacity(technologies, costs): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) @@ -330,7 +320,7 @@ def test_back_to_muse_capacity(technologies, costs, timeslices): assert (copy == lpcosts.capacity).all() -def test_back_to_muse_production(technologies, costs, timeslices): +def test_back_to_muse_production(technologies, costs): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) @@ -350,7 +340,7 @@ def test_back_to_muse_production(technologies, costs, timeslices): assert (copy == lpcosts.production).all() -def test_back_to_muse_all(technologies, costs, timeslices, rng: np.random.Generator): +def test_back_to_muse_all(technologies, costs, rng: np.random.Generator): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) @@ -381,7 +371,7 @@ 
def test_back_to_muse_all(technologies, costs, timeslices, rng: np.random.Genera assert (copy.production == lpcosts.production).all() -def test_scipy_adapter_back_to_muse(technologies, costs, timeslices, rng): +def test_scipy_adapter_back_to_muse(technologies, costs, rng): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) @@ -404,7 +394,7 @@ def test_scipy_adapter_back_to_muse(technologies, costs, timeslices, rng): ) ) - adapter = ScipyAdapter.factory(technologies, costs, timeslices) + adapter = ScipyAdapter.factory(technologies, costs) assert (adapter.to_muse(x).capacity == lpcosts.capacity).all() assert (adapter.to_muse(x).production == lpcosts.production).all() @@ -420,14 +410,12 @@ def _as_list(data: Union[xr.DataArray, xr.Dataset]) -> Union[xr.DataArray, xr.Da return data -def test_scipy_adapter_standard_constraints( - technologies, costs, constraints, timeslices -): +def test_scipy_adapter_standard_constraints(technologies, costs, constraints): from muse.constraints import ScipyAdapter technologies = technologies.interp(year=2025) - adapter = ScipyAdapter.factory(technologies, costs, timeslices, *constraints) + adapter = ScipyAdapter.factory(technologies, costs, *constraints) maxprod = next(cs for cs in constraints if cs.name == "max_production") maxcapa = next(cs for cs in constraints if cs.name == "max capacity expansion") demand = next(cs for cs in constraints if cs.name == "demand") From 88d40b489f80f55685bebd6081f80106d97aaaa1 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Tue, 22 Oct 2024 09:07:30 +0100 Subject: [PATCH 46/92] Fix remaining tests --- src/muse/timeslices.py | 4 ++-- tests/test_readers.py | 4 ++-- tests/test_timeslices.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 8f324bfb7..55eec9092 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -44,8 +44,8 @@ def read_timeslices( weight of each timeslice. 
Example: - >>> from muse.timeslices import reference_timeslice - >>> reference_timeslice( + >>> from muse.timeslices import read_timeslices + >>> read_timeslices( ... """ ... [timeslices] ... spring.weekday = 5 diff --git a/tests/test_readers.py b/tests/test_readers.py index 066c81b19..b4400a06a 100644 --- a/tests/test_readers.py +++ b/tests/test_readers.py @@ -484,8 +484,8 @@ def test_read_technodata_timeslices(tmp_path): assert isinstance(data, xr.Dataset) assert set(data.dims) == {"technology", "region", "year", "timeslice"} assert dict(data.dtypes) == dict( - utilization_factor=np.float64, - minimum_service_factor=np.float64, + utilization_factor=np.int64, + minimum_service_factor=np.int64, ) assert list(data.coords["technology"].values) == ["gasCCGT", "windturbine"] assert list(data.coords["region"].values) == ["R1"] diff --git a/tests/test_timeslices.py b/tests/test_timeslices.py index a704969e0..d1dd4e72f 100644 --- a/tests/test_timeslices.py +++ b/tests/test_timeslices.py @@ -51,7 +51,7 @@ def timeslice_dataarray(reference): ) -def test_reference_timeslice(): +def test_read_timeslices(): from toml import loads from muse.timeslices import read_timeslices From 4fbc1ff06e04035e33db66219011fa0e2edfb4dd Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Tue, 22 Oct 2024 16:17:24 +0100 Subject: [PATCH 47/92] Create separate functions for broadcasting and distributing timeslices --- src/muse/constraints.py | 12 ++++---- src/muse/costs.py | 14 ++++----- src/muse/demand_share.py | 2 +- src/muse/objectives.py | 16 +++++----- src/muse/outputs/mca.py | 50 ++++++++++++++----------------- src/muse/quantities.py | 20 ++++++++----- src/muse/readers/csv.py | 10 +++---- src/muse/sectors/preset_sector.py | 4 +-- src/muse/sectors/sector.py | 8 ++--- src/muse/timeslices.py | 42 +++++++------------------- tests/test_costs.py | 4 +-- tests/test_quantities.py | 18 +++++------ 12 files changed, 87 insertions(+), 113 deletions(-) diff --git a/src/muse/constraints.py 
b/src/muse/constraints.py index 51f931925..16275624b 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -446,7 +446,7 @@ def max_production( from xarray import ones_like, zeros_like from muse.commodities import is_enduse - from muse.timeslices import convert_timeslice + from muse.timeslices import distribute_timeslice if year is None: year = int(market.year.min()) @@ -465,7 +465,7 @@ def max_production( .sel(**kwargs) .drop_vars("technology") ) - capacity = convert_timeslice(techs.fixed_outputs) * techs.utilization_factor + capacity = distribute_timeslice(techs.fixed_outputs) * techs.utilization_factor if "asset" not in capacity.dims and "asset" in search_space.dims: capacity = capacity.expand_dims(asset=search_space.asset) production = ones_like(capacity) @@ -724,7 +724,7 @@ def minimum_service( from xarray import ones_like, zeros_like from muse.commodities import is_enduse - from muse.timeslices import convert_timeslice + from muse.timeslices import distribute_timeslice if "minimum_service_factor" not in technologies.data_vars: return None @@ -747,7 +747,7 @@ def minimum_service( .sel(**kwargs) .drop_vars("technology") ) - capacity = convert_timeslice(techs.fixed_outputs) * techs.minimum_service_factor + capacity = distribute_timeslice(techs.fixed_outputs) * techs.minimum_service_factor if "asset" not in capacity.dims: capacity = capacity.expand_dims(asset=search_space.asset) production = ones_like(capacity) @@ -808,7 +808,7 @@ def lp_costs(technologies: xr.Dataset, costs: xr.DataArray) -> xr.Dataset: from xarray import zeros_like from muse.commodities import is_enduse - from muse.timeslices import convert_timeslice + from muse.timeslices import distribute_timeslice assert "year" not in technologies.dims @@ -821,7 +821,7 @@ def lp_costs(technologies: xr.Dataset, costs: xr.DataArray) -> xr.Dataset: selection["region"] = costs.region fouts = technologies.fixed_outputs.sel(selection).rename(technology="replacement") - production = zeros_like(costs 
* convert_timeslice(fouts)) + production = zeros_like(costs * distribute_timeslice(fouts)) for dim in production.dims: if isinstance(production.get_index(dim), pd.MultiIndex): production = drop_timeslice(production) diff --git a/src/muse/costs.py b/src/muse/costs.py index 3091cf94e..585417b6c 100644 --- a/src/muse/costs.py +++ b/src/muse/costs.py @@ -13,7 +13,7 @@ from muse.commodities import is_enduse, is_fuel, is_material, is_pollutant from muse.quantities import consumption -from muse.timeslices import convert_timeslice +from muse.timeslices import distribute_timeslice from muse.utilities import filter_input @@ -96,7 +96,7 @@ def net_present_value( raw_revenues = (production * prices_non_env * rates).sum(("commodity", "year")) # Cost of installed capacity - installed_capacity_costs = convert_timeslice( + installed_capacity_costs = distribute_timeslice( techs.cap_par * (capacity**techs.cap_exp), ) @@ -118,7 +118,7 @@ def net_present_value( material_costs = (production * prices_material * rates).sum(("commodity", "year")) # Fixed and Variable costs - fixed_costs = convert_timeslice( + fixed_costs = distribute_timeslice( techs.fix_par * (capacity**techs.fix_exp), ) variable_costs = techs.var_par * ( @@ -256,7 +256,7 @@ def lifetime_levelized_cost_of_energy( fuels = is_fuel(technologies.comm_usage) # Cost of installed capacity - installed_capacity_costs = convert_timeslice( + installed_capacity_costs = distribute_timeslice( techs.cap_par * (capacity**techs.cap_exp), ) @@ -278,7 +278,7 @@ def lifetime_levelized_cost_of_energy( material_costs = (production * prices_material * rates).sum(("commodity", "year")) # Fixed and Variable costs - fixed_costs = convert_timeslice( + fixed_costs = distribute_timeslice( techs.fix_par * (capacity**techs.fix_exp), ) variable_costs = ( @@ -364,11 +364,11 @@ def annual_levelized_cost_of_energy( rates = techs.interest_rate / (1 - (1 + techs.interest_rate) ** (-life)) annualized_capital_costs = ( - convert_timeslice(techs.cap_par * 
rates) / techs.utilization_factor + distribute_timeslice(techs.cap_par * rates) / techs.utilization_factor ) o_and_e_costs = ( - convert_timeslice(techs.fix_par + techs.var_par) / techs.utilization_factor + distribute_timeslice(techs.fix_par + techs.var_par) / techs.utilization_factor ) fuel_costs = (techs.fixed_inputs * prices).sum("commodity") diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index af81fccb6..e3fbb7b10 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -145,7 +145,7 @@ def new_and_retro( A_{a, s}^r = w_s\sum_i A_a^{r, i} with :math:`w_s` a weight associated with each timeslice and determined via - :py:func:`muse.timeslices.convert_timeslice`. + :py:func:`muse.timeslices.distribute_timeslice`. #. An intermediate quantity, the :py:func:`unmet demand ` :math:`U` is defined from diff --git a/src/muse/objectives.py b/src/muse/objectives.py index 9fb0988df..94e1d618c 100644 --- a/src/muse/objectives.py +++ b/src/muse/objectives.py @@ -383,12 +383,12 @@ def lifetime_levelized_cost_of_energy( due to a zero utilisation factor. """ from muse.costs import lifetime_levelized_cost_of_energy as LCOE - from muse.timeslices import convert_timeslice + from muse.timeslices import distribute_timeslice capacity = capacity_to_service_demand(technologies, demand) production = ( capacity - * convert_timeslice(technologies.fixed_outputs) + * distribute_timeslice(technologies.fixed_outputs) * technologies.utilization_factor ) @@ -416,12 +416,12 @@ def net_present_value( See :py:func:`muse.costs.net_present_value` for more details. 
""" from muse.costs import net_present_value as NPV - from muse.timeslices import convert_timeslice + from muse.timeslices import distribute_timeslice capacity = capacity_to_service_demand(technologies, demand) production = ( capacity - * convert_timeslice(technologies.fixed_outputs) + * distribute_timeslice(technologies.fixed_outputs) * technologies.utilization_factor ) @@ -448,12 +448,12 @@ def net_present_cost( See :py:func:`muse.costs.net_present_cost` for more details. """ from muse.costs import net_present_cost as NPC - from muse.timeslices import convert_timeslice + from muse.timeslices import distribute_timeslice capacity = capacity_to_service_demand(technologies, demand) production = ( capacity - * convert_timeslice(technologies.fixed_outputs) + * distribute_timeslice(technologies.fixed_outputs) * technologies.utilization_factor ) @@ -480,12 +480,12 @@ def equivalent_annual_cost( See :py:func:`muse.costs.equivalent_annual_cost` for more details. """ from muse.costs import equivalent_annual_cost as EAC - from muse.timeslices import convert_timeslice + from muse.timeslices import distribute_timeslice capacity = capacity_to_service_demand(technologies, demand) production = ( capacity - * convert_timeslice(technologies.fixed_outputs) + * distribute_timeslice(technologies.fixed_outputs) * technologies.utilization_factor ) diff --git a/src/muse/outputs/mca.py b/src/muse/outputs/mca.py index f714de2a0..b50976d11 100644 --- a/src/muse/outputs/mca.py +++ b/src/muse/outputs/mca.py @@ -35,7 +35,7 @@ def quantity( from muse.outputs.sector import market_quantity from muse.registration import registrator from muse.sectors import AbstractSector -from muse.timeslices import convert_timeslice, drop_timeslice +from muse.timeslices import distribute_timeslice, drop_timeslice from muse.utilities import multiindex_to_coords OUTPUT_QUANTITY_SIGNATURE = Callable[ @@ -350,12 +350,10 @@ def sector_supply(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.Da ] 
agent_market.loc[dict(commodity=excluded)] = 0 - result = convert_timeslice( - supply( - agent_market, - capacity, - technologies, - ), + result = supply( + agent_market, + capacity, + technologies, ) if "year" in result.dims: @@ -580,13 +578,12 @@ def sector_consumption( ] agent_market.loc[dict(commodity=excluded)] = 0 - production = convert_timeslice( - supply( - agent_market, - capacity, - technologies, - ), + production = supply( + agent_market, + capacity, + technologies, ) + prices = a.filter_input(market.prices, year=output_year) result = consumption( technologies=technologies, production=production, prices=prices @@ -720,12 +717,10 @@ def sector_fuel_costs( year=output_year, ).fillna(0.0) - production = convert_timeslice( - supply( - agent_market, - capacity, - technologies, - ), + production = supply( + agent_market, + capacity, + technologies, ) prices = a.filter_input(market.prices, year=output_year) @@ -776,7 +771,7 @@ def sector_capital_costs( year=output_year, technology=capacity.technology, ) - data_agent = convert_timeslice(data.cap_par * (capacity**data.cap_exp)) + data_agent = distribute_timeslice(data.cap_par * (capacity**data.cap_exp)) data_agent["agent"] = a.name data_agent["category"] = a.category data_agent["sector"] = getattr(sector, "name", "unnamed") @@ -833,13 +828,12 @@ def sector_emission_costs( i = (np.where(envs))[0][0] red_envs = envs[i].commodity.values prices = a.filter_input(market.prices, year=output_year, commodity=red_envs) - production = convert_timeslice( - supply( - agent_market, - capacity, - technologies, - ), + production = supply( + agent_market, + capacity, + technologies, ) + total = production.sel(commodity=enduses).sum("commodity") data_agent = total * (allemissions * prices).sum("commodity") data_agent["agent"] = a.name @@ -906,7 +900,7 @@ def sector_lcoe(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.Data capacity = agent.filter_input(capacity_to_service_demand(demand, techs)) production = ( capacity - 
* convert_timeslice(techs.fixed_outputs) + * distribute_timeslice(techs.fixed_outputs) * techs.utilization_factor ) @@ -983,7 +977,7 @@ def sector_eac(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.DataF capacity = agent.filter_input(capacity_to_service_demand(demand, techs)) production = ( capacity - * convert_timeslice(techs.fixed_outputs) + * distribute_timeslice(techs.fixed_outputs) * techs.utilization_factor ) diff --git a/src/muse/quantities.py b/src/muse/quantities.py index 0d7513982..13b627a70 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -153,7 +153,7 @@ def gross_margin( - non-environmental commodities OUTPUTS are related to revenues. """ from muse.commodities import is_enduse, is_pollutant - from muse.timeslices import convert_timeslice + from muse.timeslices import distribute_timeslice from muse.utilities import broadcast_techs tech = broadcast_techs( # type: ignore @@ -190,7 +190,7 @@ def gross_margin( enduses = is_enduse(technologies.comm_usage) # Variable costs depend on factors such as labour - variable_costs = convert_timeslice( + variable_costs = distribute_timeslice( var_par * ((fixed_outputs.sel(commodity=enduses)).sum("commodity")) ** var_exp, ) @@ -340,7 +340,7 @@ def maximum_production(technologies: xr.Dataset, capacity: xr.DataArray, **filte filters and the set of technologies in `capacity`. 
""" from muse.commodities import is_enduse - from muse.timeslices import convert_timeslice + from muse.timeslices import distribute_timeslice from muse.utilities import broadcast_techs, filter_input capa = filter_input( @@ -352,7 +352,9 @@ def maximum_production(technologies: xr.Dataset, capacity: xr.DataArray, **filte ftechs = filter_input( btechs, **{k: v for k, v in filters.items() if k in btechs.dims} ) - result = capa * convert_timeslice(ftechs.fixed_outputs) * ftechs.utilization_factor + result = ( + capa * distribute_timeslice(ftechs.fixed_outputs) * ftechs.utilization_factor + ) return result.where(is_enduse(result.comm_usage), 0) @@ -543,7 +545,7 @@ def minimum_production(technologies: xr.Dataset, capacity: xr.DataArray, **filte the filters and the set of technologies in `capacity`. """ from muse.commodities import is_enduse - from muse.timeslices import convert_timeslice + from muse.timeslices import distribute_timeslice from muse.utilities import broadcast_techs, filter_input capa = filter_input( @@ -564,7 +566,9 @@ def minimum_production(technologies: xr.Dataset, capacity: xr.DataArray, **filte btechs, **{k: v for k, v in filters.items() if k in btechs.dims} ) result = ( - capa * convert_timeslice(ftechs.fixed_outputs) * ftechs.minimum_service_factor + capa + * distribute_timeslice(ftechs.fixed_outputs) + * ftechs.minimum_service_factor ) return result.where(is_enduse(result.comm_usage), 0) @@ -574,10 +578,10 @@ def capacity_to_service_demand( technologies: xr.Dataset, ) -> xr.DataArray: """Minimum capacity required to fulfill the demand.""" - from muse.timeslices import convert_timeslice + from muse.timeslices import distribute_timeslice timeslice_outputs = ( - convert_timeslice(technologies.fixed_outputs.sel(commodity=demand.commodity)) + distribute_timeslice(technologies.fixed_outputs.sel(commodity=demand.commodity)) * technologies.utilization_factor ) capa_to_service_demand = demand / timeslice_outputs diff --git a/src/muse/readers/csv.py 
b/src/muse/readers/csv.py index f159d61c7..91936a30b 100644 --- a/src/muse/readers/csv.py +++ b/src/muse/readers/csv.py @@ -136,7 +136,7 @@ def to_agent_share(name): def read_technodata_timeslices(filename: Union[str, Path]) -> xr.Dataset: from muse.readers import camel_to_snake - from muse.timeslices import convert_timeslice + from muse.timeslices import TIMESLICE csv = pd.read_csv(filename, float_precision="high", low_memory=False) csv = csv.rename(columns=camel_to_snake) @@ -170,7 +170,7 @@ def read_technodata_timeslices(filename: Union[str, Path]) -> xr.Dataset: if item not in ["technology", "region", "year"] ] result = result.stack(timeslice=timeslice_levels) - result = convert_timeslice(result) + result = result.sel(timeslice=TIMESLICE.timeslice) # sorts timeslices into the correct order return result @@ -607,7 +607,7 @@ def read_initial_market( """Read projections, import and export csv files.""" from logging import getLogger - from muse.timeslices import TIMESLICE, convert_timeslice + from muse.timeslices import TIMESLICE, distribute_timeslice # Projections must always be present if isinstance(projections, (str, Path)): @@ -630,8 +630,8 @@ def read_initial_market( getLogger(__name__).info("Base year import not provided. 
Set to zero.") base_year_import = xr.zeros_like(projections) - base_year_export = convert_timeslice(base_year_export) - base_year_import = convert_timeslice(base_year_import) + base_year_export = distribute_timeslice(base_year_export) + base_year_import = distribute_timeslice(base_year_import) base_year_export.name = "exports" base_year_import.name = "imports" diff --git a/src/muse/sectors/preset_sector.py b/src/muse/sectors/preset_sector.py index 04c6e48ca..116fece7e 100644 --- a/src/muse/sectors/preset_sector.py +++ b/src/muse/sectors/preset_sector.py @@ -30,7 +30,7 @@ def factory(cls, name: str, settings: Any) -> PresetSector: read_timeslice_shares, ) from muse.regressions import endogenous_demand - from muse.timeslices import TIMESLICE, convert_timeslice + from muse.timeslices import TIMESLICE, distribute_timeslice sector_conf = getattr(settings.sectors, name) presets = Dataset() @@ -118,7 +118,7 @@ def factory(cls, name: str, settings: Any) -> PresetSector: # add timeslice, if missing for component in {"supply", "consumption"}: if "timeslice" not in presets[component].dims: - presets[component] = convert_timeslice(presets[component]) + presets[component] = distribute_timeslice(presets[component]) comm_usage = (presets.costs > 0).any(set(presets.costs.dims) - {"commodity"}) presets["comm_usage"] = ( diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index 7ce1bae40..9d10165cf 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -384,7 +384,7 @@ def convert_market_timeslice( intensive: str | tuple[str] = "prices", ) -> xr.Dataset: """Converts market from one to another timeslice.""" - from muse.timeslices import TIMESLICE, QuantityType, convert_timeslice + from muse.timeslices import broadcast_timeslice if isinstance(intensive, str): intensive = (intensive,) @@ -393,11 +393,7 @@ def convert_market_timeslice( intensives = market[list(timesliced.intersection(intensive))] if "timeslice" not in intensives.dims: - intensives = 
convert_timeslice( - intensives, - TIMESLICE, - QuantityType.EXTENSIVE, - ) + intensives = broadcast_timeslice(intensives) extensives = market[list(timesliced.difference(intensives.data_vars))] others = market[list(set(market.data_vars).difference(timesliced))] return xr.merge([intensives, extensives, others]) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 55eec9092..ce6425ffd 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -2,13 +2,13 @@ __all__ = [ "read_timeslices", - "convert_timeslice", + "broadcast_timeslice", + "distribute_timeslice", "drop_timeslice", "setup_module", ] from collections.abc import Mapping, Sequence -from enum import Enum, unique from typing import Union from numpy import ndarray @@ -105,43 +105,23 @@ def setup_module(settings: Union[str, Mapping]): TIMESLICE = read_timeslices(settings) -@unique -class QuantityType(Enum): - """Underlying transformation when performing time-slice conversion. - - The meaning of a quantity vs the time-slice can be different: - - - intensive: when extending the period of interest, quantities should be - added together. For instance the number of hours should be summed across - months. - - extensive: when extending the period of interest, quantities should be - broadcasted. For instance when extending a price from a one week period to - a two week period, the price should remain the same. Going in the opposite - direction (reducing the length of the time period), quantities should be - averaged. 
- """ - - INTENSIVE = "intensive" - EXTENSIVE = "extensive" - - -def convert_timeslice(x, ts=None, quantity=QuantityType.INTENSIVE): +def broadcast_timeslice(x, ts=None): from xarray import Coordinates if ts is None: ts = TIMESLICE - if hasattr(x, "timeslice"): - x = x.sel(timeslice=ts["timeslice"]) - return x - mindex_coords = Coordinates.from_pandas_multiindex(ts.timeslice, "timeslice") extensive = x.expand_dims(timeslice=ts["timeslice"]).assign_coords(mindex_coords) - if quantity is QuantityType.EXTENSIVE: - return extensive + return extensive + + +def distribute_timeslice(x, ts=None): + if ts is None: + ts = TIMESLICE - if quantity is QuantityType.INTENSIVE: - return extensive * (ts / ts.sum()) + extensive = broadcast_timeslice(x, ts) + return extensive * (ts / ts.sum()) def drop_timeslice(data: DataArray) -> DataArray: diff --git a/tests/test_costs.py b/tests/test_costs.py index 715239102..11cf46df6 100644 --- a/tests/test_costs.py +++ b/tests/test_costs.py @@ -19,11 +19,11 @@ def _capacity(technologies, demand_share): @fixture def _production(technologies, _capacity): - from muse.timeslices import convert_timeslice + from muse.timeslices import distribute_timeslice production = ( _capacity - * convert_timeslice(technologies.fixed_outputs) + * distribute_timeslice(technologies.fixed_outputs) * technologies.utilization_factor ) return production diff --git a/tests/test_quantities.py b/tests/test_quantities.py index bcd6c4cd6..d15943319 100644 --- a/tests/test_quantities.py +++ b/tests/test_quantities.py @@ -31,14 +31,14 @@ def production( ) -> xr.DataArray: from numpy.random import random - from muse.timeslices import convert_timeslice + from muse.timeslices import distribute_timeslice comms = xr.DataArray( random(len(technologies.commodity)), coords={"commodity": technologies.commodity}, dims="commodity", ) - return capacity * convert_timeslice(comms) + return capacity * distribute_timeslice(comms) def make_array(array): @@ -50,17 +50,17 @@ def 
test_supply_enduse(technologies, capacity, timeslice): """End-use part of supply.""" from muse.commodities import is_enduse from muse.quantities import maximum_production, supply - from muse.timeslices import convert_timeslice + from muse.timeslices import distribute_timeslice production = maximum_production(technologies, capacity) - demand = convert_timeslice(production.sum("asset") + 1) + demand = distribute_timeslice(production.sum("asset") + 1) spl = supply(capacity, demand, technologies).where( is_enduse(technologies.comm_usage), 0 ) assert (abs(spl - production) < 1e-12).all() assert (spl.sum("asset") < demand).all() - demand = convert_timeslice(production.sum("asset") * 0.7) + demand = distribute_timeslice(production.sum("asset") * 0.7) spl = supply(capacity, demand, technologies).where( is_enduse(technologies.comm_usage), 0 ) @@ -87,7 +87,7 @@ def test_supply_emissions(technologies, capacity): def test_gross_margin(technologies, capacity, market, timeslice): from muse.commodities import is_enduse, is_fuel, is_pollutant from muse.quantities import gross_margin - from muse.timeslices import convert_timeslice + from muse.timeslices import distribute_timeslice """ Gross margin refers to the calculation @@ -117,7 +117,7 @@ def test_gross_margin(technologies, capacity, market, timeslice): revenues = prices * prod * sum(is_enduse(usage)) env_costs = env_prices * envs * sum(is_pollutant(usage)) cons_costs = prices * fuels * sum(is_fuel(usage)) - var_costs = convert_timeslice(vp * ((prod * sum(is_enduse(usage))) ** ve)) + var_costs = distribute_timeslice(vp * ((prod * sum(is_enduse(usage))) ** ve)) expected = revenues - env_costs - cons_costs - var_costs expected *= 100 / revenues @@ -177,7 +177,7 @@ def test_consumption_with_flex(technologies, production, market, timeslice): from muse.commodities import is_enduse, is_fuel from muse.quantities import consumption - from muse.timeslices import convert_timeslice + from muse.timeslices import distribute_timeslice techs 
= technologies.copy() techs.fixed_inputs[:] = 0 @@ -206,7 +206,7 @@ def one_dim(dimension): prices = timeslice + commodity + year * region assert set(prices.dims) == set(market.prices.dims) noenduse = ~is_enduse(techs.comm_usage) - production = convert_timeslice(asset * year + commodity) + production = distribute_timeslice(asset * year + commodity) production.loc[{"commodity": noenduse}] = 0 actual = consumption(technologies, production, prices) From 0258ae983ce0254dd77398f9044cb8e9f8aab95e Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Tue, 22 Oct 2024 17:28:41 +0100 Subject: [PATCH 48/92] Check for existing timeslice dimension in broadcast_timeslice --- src/muse/timeslices.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index ce6425ffd..d22ab791f 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -111,6 +111,12 @@ def broadcast_timeslice(x, ts=None): if ts is None: ts = TIMESLICE + # If x already has timeslices, check that it is matches the reference timeslice. 
+ if "timeslice" in x.dims: + if x.timeslice.reset_coords(drop=True).equals(ts.timeslice): + return x + raise ValueError("x has incompatible timeslicing.") + mindex_coords = Coordinates.from_pandas_multiindex(ts.timeslice, "timeslice") extensive = x.expand_dims(timeslice=ts["timeslice"]).assign_coords(mindex_coords) return extensive @@ -121,7 +127,7 @@ def distribute_timeslice(x, ts=None): ts = TIMESLICE extensive = broadcast_timeslice(x, ts) - return extensive * (ts / ts.sum()) + return extensive * (ts / broadcast_timeslice(ts.sum())) def drop_timeslice(data: DataArray) -> DataArray: From b0ce2283e07cafd3927d4dfbb117187f060320c5 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Tue, 22 Oct 2024 17:51:13 +0100 Subject: [PATCH 49/92] Fix test --- tests/test_quantities.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/test_quantities.py b/tests/test_quantities.py index d15943319..8a1da12db 100644 --- a/tests/test_quantities.py +++ b/tests/test_quantities.py @@ -50,17 +50,16 @@ def test_supply_enduse(technologies, capacity, timeslice): """End-use part of supply.""" from muse.commodities import is_enduse from muse.quantities import maximum_production, supply - from muse.timeslices import distribute_timeslice production = maximum_production(technologies, capacity) - demand = distribute_timeslice(production.sum("asset") + 1) + demand = production.sum("asset") + 1 spl = supply(capacity, demand, technologies).where( is_enduse(technologies.comm_usage), 0 ) assert (abs(spl - production) < 1e-12).all() assert (spl.sum("asset") < demand).all() - demand = distribute_timeslice(production.sum("asset") * 0.7) + demand = production.sum("asset") * 0.7 spl = supply(capacity, demand, technologies).where( is_enduse(technologies.comm_usage), 0 ) From 885f752f48cca023a84f1910816ee0bde6e91e40 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 25 Oct 2024 08:33:20 +0000 Subject: [PATCH 50/92] 
[pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- tests/test_quantities.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_quantities.py b/tests/test_quantities.py index bbf6e2429..252cda67a 100644 --- a/tests/test_quantities.py +++ b/tests/test_quantities.py @@ -86,7 +86,7 @@ def test_supply_emissions(technologies, capacity): def test_gross_margin(technologies, capacity, market, timeslice): from muse.commodities import is_enduse, is_fuel, is_pollutant from muse.quantities import gross_margin - + """ Gross margin refers to the calculation .. _here: From 433c39c2775cb6074945f23b181b95d5c0733571 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 25 Oct 2024 09:47:21 +0100 Subject: [PATCH 51/92] Fix merge conflicts --- src/muse/costs.py | 29 ++++++----------------------- src/muse/objectives.py | 11 ++++------- src/muse/quantities.py | 9 ++------- 3 files changed, 12 insertions(+), 37 deletions(-) diff --git a/src/muse/costs.py b/src/muse/costs.py index 4c18a12a1..b379e93f9 100644 --- a/src/muse/costs.py +++ b/src/muse/costs.py @@ -371,38 +371,21 @@ def annual_levelized_cost_of_energy( distribute_timeslice(techs.fix_par + techs.var_par) / techs.utilization_factor ) - fuel_costs = ( - convert_timeslice(techs.fixed_inputs, prices.timeslice, QuantityType.EXTENSIVE) - * prices - ).sum("commodity") - - fuel_costs += ( - convert_timeslice( - techs.flexible_inputs, prices.timeslice, QuantityType.EXTENSIVE - ) - * prices - ).sum("commodity") + fuel_costs = (distribute_timeslice(techs.fixed_inputs) * prices).sum("commodity") + fuel_costs += (distribute_timeslice(techs.flexible_inputs) * prices).sum( + "commodity" + ) if "region" in techs.dims: env_costs = ( - ( - convert_timeslice( - techs.fixed_outputs, prices.timeslice, QuantityType.EXTENSIVE - ) - * prices - ) + (distribute_timeslice(techs.fixed_outputs) * prices) .sel(region=techs.region) .sel(commodity=is_pollutant(techs.comm_usage)) 
.sum("commodity") ) else: env_costs = ( - ( - convert_timeslice( - techs.fixed_outputs, prices.timeslice, QuantityType.EXTENSIVE - ) - * prices - ) + (distribute_timeslice(techs.fixed_outputs) * prices) .sel(commodity=is_pollutant(techs.comm_usage)) .sum("commodity") ) diff --git a/src/muse/objectives.py b/src/muse/objectives.py index 27804da05..e0a67e017 100644 --- a/src/muse/objectives.py +++ b/src/muse/objectives.py @@ -317,18 +317,15 @@ def emission_cost( with :math:`s` the timeslices and :math:`c` the commodity. """ from muse.commodities import is_enduse, is_pollutant - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import distribute_timeslice enduses = is_enduse(technologies.comm_usage.sel(commodity=demand.commodity)) total = demand.sel(commodity=enduses).sum("commodity") envs = is_pollutant(technologies.comm_usage) prices = filter_input(prices, year=demand.year.item(), commodity=envs) - return total * ( - convert_timeslice( - technologies.fixed_outputs, prices.timeslice, QuantityType.EXTENSIVE - ) - * prices - ).sum("commodity") + return total * (distribute_timeslice(technologies.fixed_outputs) * prices).sum( + "commodity" + ) @register_objective diff --git a/src/muse/quantities.py b/src/muse/quantities.py index c79caeb98..cbfa57d64 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -196,14 +196,9 @@ def gross_margin( # The individual prices are selected # costs due to consumables, direct inputs - consumption_costs = ( - prices - * convert_timeslice(fixed_inputs, prices.timeslice, QuantityType.EXTENSIVE) - ).sum("commodity") + consumption_costs = (prices * distribute_timeslice(fixed_inputs)).sum("commodity") # costs due to pollutants - production_costs = prices * convert_timeslice( - fixed_outputs, prices.timeslice, QuantityType.EXTENSIVE - ) + production_costs = prices * distribute_timeslice(fixed_outputs) environmental_costs = (production_costs.sel(commodity=environmentals)).sum( "commodity" ) From 
d5875f99076ee0fff94513d0efb62ad95c4ab91b Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 28 Oct 2024 13:38:08 +0000 Subject: [PATCH 52/92] Fix tests --- tests/test_constraints.py | 1 - tests/test_quantities.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/test_constraints.py b/tests/test_constraints.py index 672bc5a5d..3681821cf 100644 --- a/tests/test_constraints.py +++ b/tests/test_constraints.py @@ -68,7 +68,6 @@ def market_demand(assets, technologies): return 0.8 * maximum_production( technologies.interp(year=2025), assets.capacity.sel(year=2025).groupby("technology").sum("asset"), - timeslices=market.timeslice, ).rename(technology="asset") diff --git a/tests/test_quantities.py b/tests/test_quantities.py index ed56f24ab..7771bb136 100644 --- a/tests/test_quantities.py +++ b/tests/test_quantities.py @@ -597,7 +597,7 @@ def test_min_production(technologies, capacity, timeslice): # If no minimum service factor is defined, the minimum production is zero assert "minimum_service_factor" not in technologies - production = minimum_production(technologies, capacity, timeslice) + production = minimum_production(technologies, capacity) assert (production == 0).all() # If minimum service factor is defined, then the minimum production is not zero From 46ab8208b04272229333d64cbc1fa8659b239267 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 28 Oct 2024 13:48:35 +0000 Subject: [PATCH 53/92] Remove timeslice arguments --- src/muse/constraints.py | 1 - src/muse/demand_share.py | 4 --- src/muse/investments.py | 1 - src/muse/production.py | 2 +- src/muse/quantities.py | 13 +++------ tests/test_demand_share.py | 2 -- tests/test_quantities.py | 54 ++++++++++++-------------------------- 7 files changed, 22 insertions(+), 55 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 62bfbcfbd..16275624b 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -1078,7 +1078,6 @@ class ScipyAdapter: >>> 
market_demand = 0.8 * maximum_production( ... res.technologies.interp(year=2025), ... assets.capacity.sel(year=2025).groupby("technology").sum("asset"), - ... timeslices=market.timeslice, ... ).rename(technology="asset") >>> costs = search * np.arange(np.prod(search.shape)).reshape(search.shape) >>> constraint = cs.max_capacity_expansion( diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index 41df0886c..d36719a29 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -236,7 +236,6 @@ def decommissioning(capacity): technologies, capacity, year=[current_year, current_year + forecast], - timeslices=market.timeslice, ).squeeze("year") capacity = reduce_assets([u.assets.capacity for u in agents]) @@ -311,7 +310,6 @@ def decommissioning(capacity): partial( maximum_production, technologies=regional_techs, - timeslices=market.timeslice, year=current_year, ), id_to_nquantity, @@ -365,7 +363,6 @@ def decommissioning(capacity): technologies, capacity, year=[current_year, current_year + forecast], - timeslices=market.timeslice, ).squeeze("year") # Make sure there are no retrofit agents @@ -418,7 +415,6 @@ def decommissioning(capacity): partial( maximum_production, technologies=technologies.sel(region=region), - timeslices=market.timeslice, year=current_year, ), id_to_quantity, diff --git a/src/muse/investments.py b/src/muse/investments.py index 18fe9a012..87ab26ce9 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -234,7 +234,6 @@ def adhoc_match_demand( max_prod = maximum_production( technologies, max_capacity, - timeslices=demand, year=year, technology=costs.replacement, commodity=demand.commodity, diff --git a/src/muse/production.py b/src/muse/production.py index ac5e9ac46..b23d4666d 100644 --- a/src/muse/production.py +++ b/src/muse/production.py @@ -108,7 +108,7 @@ def maximum_production( """ from muse.quantities import maximum_production - return maximum_production(technologies, capacity, timeslices=market.timeslice) + 
return maximum_production(technologies, capacity) @register_production(name=("share", "shares")) diff --git a/src/muse/quantities.py b/src/muse/quantities.py index c153d6c07..2d836ab0f 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -48,8 +48,8 @@ def supply( if production_method is None: production_method = maximum_production - maxprod = production_method(technologies, capacity, timeslices=demand) - minprod = minimum_production(technologies, capacity, timeslices=demand) + maxprod = production_method(technologies, capacity) + minprod = minimum_production(technologies, capacity) size = np.array(maxprod.region).size # in presence of trade demand needs to map maxprod dst_region if ( @@ -216,7 +216,6 @@ def gross_margin( def decommissioning_demand( technologies: xr.Dataset, capacity: xr.DataArray, - timeslices: xr.DataArray, year: Optional[Sequence[int]] = None, ) -> xr.DataArray: r"""Computes demand from process decommissioning. @@ -259,7 +258,6 @@ def decommissioning_demand( return maximum_production( technologies, capacity_decrease, - timeslices=timeslices, ).clip(min=0) @@ -392,9 +390,7 @@ def demand_matched_production( technodata = cast(xr.Dataset, broadcast_techs(technologies, capacity)) cost = ALCOE(prices=prices, technologies=technodata, **filters) - max_production = maximum_production( - technodata, capacity, timeslices=demand, **filters - ) + max_production = maximum_production(technodata, capacity, **filters) assert ("timeslice" in demand.dims) == ("timeslice" in cost.dims) return demand_matching(demand, cost, max_production) @@ -479,7 +475,7 @@ def group_assets(x: xr.DataArray) -> xr.DataArray: return xr.Dataset(dict(x=x)).groupby("region").sum("asset").x ranking = costs.rank("asset") - maxprod = maximum_production(technodata, capacity, timeslices=demand.timeslice) + maxprod = maximum_production(technodata, capacity) commodity = (maxprod > 0).any([i for i in maxprod.dims if i != "commodity"]) commodity = commodity.drop_vars( [u for u in 
commodity.coords if u not in commodity.dims] @@ -529,7 +525,6 @@ def group_assets(x: xr.DataArray) -> xr.DataArray: def minimum_production( technologies: xr.Dataset, capacity: xr.DataArray, - timeslices: xr.DataArray, **filters, ): r"""Minimum production for a given capacity. diff --git a/tests/test_demand_share.py b/tests/test_demand_share.py index 2f0b1dd74..32668a4a6 100644 --- a/tests/test_demand_share.py +++ b/tests/test_demand_share.py @@ -158,7 +158,6 @@ def method(capacity): return decommissioning_demand( technologies.sel(region="USA"), capacity, - matching_market.timeslice, year=[2012, 2017], ) @@ -195,7 +194,6 @@ def method(capacity): return 0 * decommissioning_demand( technologies.sel(region="USA"), capacity, - matching_market.timeslice, year=[2012, 2017], ) diff --git a/tests/test_quantities.py b/tests/test_quantities.py index 7771bb136..c069f9e7a 100644 --- a/tests/test_quantities.py +++ b/tests/test_quantities.py @@ -46,7 +46,7 @@ def make_array(array): return xr.DataArray(data, dims=array.dims, coords=array.coords) -def test_supply_enduse(technologies, capacity, timeslice): +def test_supply_enduse(technologies, capacity): """End-use part of supply.""" from muse.commodities import is_enduse from muse.quantities import maximum_production, supply @@ -69,12 +69,12 @@ def test_supply_enduse(technologies, capacity, timeslice): ).all() -def test_supply_emissions(technologies, capacity, timeslice): +def test_supply_emissions(technologies, capacity): """Emission part of supply.""" from muse.commodities import is_enduse, is_pollutant from muse.quantities import emission, maximum_production, supply - production = maximum_production(technologies, capacity, timeslices=timeslice) + production = maximum_production(technologies, capacity) spl = supply(capacity, production.sum("asset") + 1, technologies) msn = emission(spl.where(is_enduse(spl.comm_usage), 0), technologies.fixed_outputs) actual, expected = xr.broadcast( @@ -134,7 +134,7 @@ def 
test_decommissioning_demand(technologies, capacity, timeslice): capacity.loc[{"year": 2015}] = forecast = 1.0 technologies.fixed_outputs[:] = fouts = 0.5 technologies.utilization_factor[:] = ufac = 0.4 - decom = decommissioning_demand(technologies, capacity, timeslice, years) + decom = decommissioning_demand(technologies, capacity, years) assert set(decom.dims) == {"asset", "commodity", "year", "timeslice"} assert decom.sel(commodity=is_enduse(technologies.comm_usage)).sum( "timeslice" @@ -242,7 +242,7 @@ def one_dim(dimension): def test_production_aggregate_asset_view( - capacity: xr.DataArray, technologies: xr.Dataset, timeslice: xr.DataArray + capacity: xr.DataArray, technologies: xr.Dataset ): """Production when capacity has format of agent.sector. @@ -260,7 +260,7 @@ def test_production_aggregate_asset_view( technologies.fixed_outputs[:] = 1 technologies.utilization_factor[:] = 1 - prod = maximum_production(technologies, capacity, timeslices=timeslice) + prod = maximum_production(technologies, capacity) assert set(prod.dims) == set(capacity.dims).union({"commodity", "timeslice"}) assert prod.sel(commodity=~enduses).values == approx(0) prod, expected = xr.broadcast( @@ -270,7 +270,7 @@ def test_production_aggregate_asset_view( technologies.fixed_outputs[:] = fouts = 2 technologies.utilization_factor[:] = ufact = 0.5 - prod = maximum_production(technologies, capacity, timeslices=timeslice) + prod = maximum_production(technologies, capacity) assert prod.sel(commodity=~enduses).values == approx(0) assert set(prod.dims) == set(capacity.dims).union({"commodity", "timeslice"}) prod, expected = xr.broadcast( @@ -280,7 +280,7 @@ def test_production_aggregate_asset_view( technologies.fixed_outputs[:] = fouts = 3 technologies.utilization_factor[:] = ufact = 0.5 - prod = maximum_production(technologies, capacity, timeslices=timeslice) + prod = maximum_production(technologies, capacity) assert prod.sel(commodity=~enduses).values == approx(0) assert set(prod.dims) == 
set(capacity.dims).union({"commodity", "timeslice"}) prod, expected = xr.broadcast( @@ -406,7 +406,7 @@ def test_demand_matched_production( technologies.fixed_outputs[:] *= is_enduse(technologies.comm_usage) capacity = capacity.sel(year=capacity.year.min(), drop=True) - max_prod = maximum_production(technologies, capacity, timeslices=demand.timeslice) + max_prod = maximum_production(technologies, capacity) demand = max_prod.sum("asset") demand[:] *= np.random.choice([0, 1, 1 / 2, 1 / 3, 1 / 10], demand.shape) prices = xr.zeros_like(demand) @@ -433,13 +433,7 @@ def test_costed_production_exact_match(market, capacity, technologies): prices=market.prices.sel(region=technodata.region), technologies=technodata ) maxdemand = ( - xr.Dataset( - dict( - mp=maximum_production( - technologies, capacity, timeslices=market.timeslice - ) - ) - ) + xr.Dataset(dict(mp=maximum_production(technologies, capacity))) .groupby("region") .sum("asset") .mp @@ -465,9 +459,7 @@ def test_costed_production_single_region(market, capacity, technologies): capacity = capacity.drop_vars("region") capacity["region"] = "USA" market = market.sel(region=[capacity.region.values]) - maxdemand = maximum_production( - technologies, capacity, timeslices=market.timeslice - ).sum("asset") + maxdemand = maximum_production(technologies, capacity).sum("asset") market["consumption"] = drop_timeslice(0.9 * maxdemand) technodata = broadcast_techs(technologies, capacity) costs = annual_levelized_cost_of_energy( @@ -493,13 +485,7 @@ def test_costed_production_single_year(market, capacity, technologies): capacity = capacity.sel(year=2010) market = market.sel(year=2010) maxdemand = ( - xr.Dataset( - dict( - mp=maximum_production( - technologies, capacity, timeslices=market.timeslice - ) - ) - ) + xr.Dataset(dict(mp=maximum_production(technologies, capacity))) .groupby("region") .sum("asset") .mp @@ -532,13 +518,7 @@ def test_costed_production_over_capacity(market, capacity, technologies): set(market.region.values) ) 
maxdemand = ( - xr.Dataset( - dict( - mp=maximum_production( - technologies, capacity, timeslices=market.timeslice - ) - ) - ) + xr.Dataset(dict(mp=maximum_production(technologies, capacity))) .groupby("region") .sum("asset") .mp @@ -573,7 +553,7 @@ def test_costed_production_with_minimum_service(market, capacity, technologies, technologies.utilization_factor.dims, rng.uniform(low=0.5, high=0.9, size=technologies.utilization_factor.shape), ) - maxprod = maximum_production(technologies, capacity, timeslices=market.timeslice) + maxprod = maximum_production(technologies, capacity) minprod = maxprod * broadcast_techs(technologies.minimum_service_factor, maxprod) maxdemand = xr.Dataset(dict(mp=minprod)).groupby("region").sum("asset").mp market["consumption"] = drop_timeslice(maxdemand * 0.9) @@ -603,9 +583,9 @@ def test_min_production(technologies, capacity, timeslice): # If minimum service factor is defined, then the minimum production is not zero # and it is less than the maximum production technologies["minimum_service_factor"] = 0.5 - production = minimum_production(technologies, capacity, timeslice) + production = minimum_production(technologies, capacity) assert not (production == 0).all() - assert (production <= maximum_production(technologies, capacity, timeslice)).all() + assert (production <= maximum_production(technologies, capacity)).all() def test_supply_capped_by_min_service(technologies, capacity, timeslice): @@ -614,7 +594,7 @@ def test_supply_capped_by_min_service(technologies, capacity, timeslice): from muse.quantities import minimum_production, supply technologies["minimum_service_factor"] = 0.3 - minprod = minimum_production(technologies, capacity, timeslice) + minprod = minimum_production(technologies, capacity) # If minimum service factor is defined, then the minimum production is not zero assert not (minprod == 0).all() From d5b5676e236f0cb72dcdced740c3e045df2fca20 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 28 Oct 2024 15:33:48 +0000 
Subject: [PATCH 54/92] Fix tests --- tests/test_quantities.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_quantities.py b/tests/test_quantities.py index c069f9e7a..a451c8e6f 100644 --- a/tests/test_quantities.py +++ b/tests/test_quantities.py @@ -46,7 +46,7 @@ def make_array(array): return xr.DataArray(data, dims=array.dims, coords=array.coords) -def test_supply_enduse(technologies, capacity): +def test_supply_enduse(technologies, capacity, timeslice): """End-use part of supply.""" from muse.commodities import is_enduse from muse.quantities import maximum_production, supply @@ -69,7 +69,7 @@ def test_supply_enduse(technologies, capacity): ).all() -def test_supply_emissions(technologies, capacity): +def test_supply_emissions(technologies, capacity, timeslice): """Emission part of supply.""" from muse.commodities import is_enduse, is_pollutant from muse.quantities import emission, maximum_production, supply @@ -296,7 +296,7 @@ def test_production_agent_asset_view( from muse.utilities import coords_to_multiindex, reduce_assets capacity = coords_to_multiindex(reduce_assets(capacity)).unstack("asset").fillna(0) - test_production_aggregate_asset_view(capacity, technologies, timeslice) + test_production_aggregate_asset_view(capacity, technologies) def test_capacity_in_use(production: xr.DataArray, technologies: xr.Dataset): From ae3c06cbe683717ef63c4d59ee7c922ccf95637a Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 30 Oct 2024 11:36:09 +0000 Subject: [PATCH 55/92] Turn off automatic broadcasting over the timeslice dimension (#530) * xarray patch to prevent automatic broadcasting * Fix most remaining broadcasting bugs * Fix some tests * Fix more tests * Simplify dlc constraint * More timeslice broadcasting * Fix incorrect uses of distribute_timeslice * Fix bug in _inner_split * Remove unnecessary drop_timeslice operations * Fix correlation model * Fix a couple of tests * Restore drop_timeslice * Restore more drop_timeslice * Fix 
demand_matching tests * Fix correlation model * Consistent timeslice dimension in objectives * Revert change to capacity_in_use * Fix objective tests * Fix more tests * Fix another test * Fix final test (hopefully) --- src/muse/__main__.py | 31 ++++++++++++++++- src/muse/agents/agent.py | 5 +-- src/muse/constraints.py | 40 +++++++++------------ src/muse/costs.py | 46 ++++++++++++++---------- src/muse/decisions.py | 4 ++- src/muse/demand_share.py | 4 ++- src/muse/investments.py | 8 ++--- src/muse/mca.py | 6 +++- src/muse/objectives.py | 46 ++++++++++++++---------- src/muse/quantities.py | 58 ++++++++++++++++++++----------- src/muse/readers/csv.py | 12 ------- src/muse/sectors/preset_sector.py | 22 ++++-------- tests/conftest.py | 10 ++++++ tests/test_costs.py | 6 ++-- tests/test_quantities.py | 20 +++++++---- 15 files changed, 186 insertions(+), 132 deletions(-) diff --git a/src/muse/__main__.py b/src/muse/__main__.py index a71bc7f0a..d0b1ffc8c 100644 --- a/src/muse/__main__.py +++ b/src/muse/__main__.py @@ -61,5 +61,34 @@ def run(): muse_main(args.settings, args.model, args.copy) +def patched_broadcast_compat_data(self, other): + from xarray.core.variable import Variable, _broadcast_compat_variables + + if (isinstance(other, Variable)) and ("timeslice" in self.dims) != ( + "timeslice" in getattr(other, "dims", []) + ): + raise ValueError( + "Broadcasting is necessary but automatic broadcasting is disabled globally." 
+ ) + + if all(hasattr(other, attr) for attr in ["dims", "data", "shape", "encoding"]): + # `other` satisfies the necessary Variable API for broadcast_variables + new_self, new_other = _broadcast_compat_variables(self, other) + self_data = new_self.data + other_data = new_other.data + dims = new_self.dims + else: + # rely on numpy broadcasting rules + self_data = self.data + other_data = other + dims = self.dims + return self_data, other_data, dims + + if "__main__" == __name__: - run() + from unittest.mock import patch + + with patch( + "xarray.core.variable._broadcast_compat_data", patched_broadcast_compat_data + ): + run() diff --git a/src/muse/agents/agent.py b/src/muse/agents/agent.py index 7e69d463a..7c674e7d7 100644 --- a/src/muse/agents/agent.py +++ b/src/muse/agents/agent.py @@ -352,10 +352,7 @@ def next( # Calculate the decision metric decision = self.compute_decision(technologies, market, demand, search_space) search = xr.Dataset(dict(search_space=search_space, decision=decision)) - if "timeslice" in search.dims: - search["demand"] = drop_timeslice(demand) - else: - search["demand"] = demand + search["demand"] = drop_timeslice(demand) # Filter assets with demand not_assets = [u for u in search.demand.dims if u != "asset"] diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 16275624b..bb261a0ae 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -446,7 +446,7 @@ def max_production( from xarray import ones_like, zeros_like from muse.commodities import is_enduse - from muse.timeslices import distribute_timeslice + from muse.timeslices import broadcast_timeslice, distribute_timeslice if year is None: year = int(market.year.min()) @@ -465,7 +465,9 @@ def max_production( .sel(**kwargs) .drop_vars("technology") ) - capacity = distribute_timeslice(techs.fixed_outputs) * techs.utilization_factor + capacity = distribute_timeslice(techs.fixed_outputs) * broadcast_timeslice( + techs.utilization_factor + ) if "asset" not in 
capacity.dims and "asset" in search_space.dims: capacity = capacity.expand_dims(asset=search_space.asset) production = ones_like(capacity) @@ -482,8 +484,8 @@ def max_production( maxadd = maxadd.rename(technology="replacement") maxadd = maxadd.where(maxadd == 0, 0.0) maxadd = maxadd.where(maxadd > 0, -1.0) - capacity = capacity * maxadd - production = production * maxadd + capacity = capacity * broadcast_timeslice(maxadd) + production = production * broadcast_timeslice(maxadd) b = b.rename(region="src_region") return xr.Dataset( dict(capacity=-cast(np.ndarray, capacity), production=production, b=b), @@ -534,21 +536,9 @@ def demand_limiting_capacity( # utilization factor. if "timeslice" in b.dims or "timeslice" in capacity.dims: ratio = b / capacity - ts = ratio.timeslice.isel( - timeslice=ratio.min("replacement").argmax("timeslice") - ) - # We select this timeslice for each array - don't trust the indices: - # search for the right timeslice in the array and select it. - b = ( - b.isel(timeslice=(b.timeslice == ts).argmax("timeslice")) - if "timeslice" in b.dims - else b - ) - capacity = ( - capacity.isel(timeslice=(capacity.timeslice == ts).argmax("timeslice")) - if "timeslice" in capacity.dims - else capacity - ) + ts_index = ratio.min("replacement").argmax("timeslice") + b = b.isel(timeslice=ts_index) + capacity = capacity.isel(timeslice=ts_index) # An adjustment is required to account for technologies that have multiple output # commodities @@ -724,7 +714,7 @@ def minimum_service( from xarray import ones_like, zeros_like from muse.commodities import is_enduse - from muse.timeslices import distribute_timeslice + from muse.timeslices import broadcast_timeslice, distribute_timeslice if "minimum_service_factor" not in technologies.data_vars: return None @@ -747,7 +737,9 @@ def minimum_service( .sel(**kwargs) .drop_vars("technology") ) - capacity = distribute_timeslice(techs.fixed_outputs) * techs.minimum_service_factor + capacity = 
distribute_timeslice(techs.fixed_outputs) * broadcast_timeslice( + techs.minimum_service_factor + ) if "asset" not in capacity.dims: capacity = capacity.expand_dims(asset=search_space.asset) production = ones_like(capacity) @@ -803,12 +795,12 @@ def lp_costs(technologies: xr.Dataset, costs: xr.DataArray) -> xr.Dataset: which production occurs and the ``commodity`` produced. >>> lpcosts.production.dims - ('asset', 'replacement', 'timeslice', 'commodity') + ('timeslice', 'asset', 'replacement', 'commodity') """ from xarray import zeros_like from muse.commodities import is_enduse - from muse.timeslices import distribute_timeslice + from muse.timeslices import broadcast_timeslice, distribute_timeslice assert "year" not in technologies.dims @@ -821,7 +813,7 @@ def lp_costs(technologies: xr.Dataset, costs: xr.DataArray) -> xr.Dataset: selection["region"] = costs.region fouts = technologies.fixed_outputs.sel(selection).rename(technology="replacement") - production = zeros_like(costs * distribute_timeslice(fouts)) + production = zeros_like(broadcast_timeslice(costs) * distribute_timeslice(fouts)) for dim in production.dims: if isinstance(production.get_index(dim), pd.MultiIndex): production = drop_timeslice(production) diff --git a/src/muse/costs.py b/src/muse/costs.py index aaa64b620..bc35f2ad4 100644 --- a/src/muse/costs.py +++ b/src/muse/costs.py @@ -13,7 +13,7 @@ from muse.commodities import is_enduse, is_fuel, is_material, is_pollutant from muse.quantities import consumption -from muse.timeslices import distribute_timeslice +from muse.timeslices import broadcast_timeslice, distribute_timeslice from muse.utilities import filter_input @@ -79,10 +79,12 @@ def net_present_value( years = xr.DataArray(iyears, coords={"year": iyears}, dims="year") # Evolution of rates with time - rates = discount_factor( - years - year + 1, - interest_rate=techs.interest_rate, - mask=years <= year + life, + rates = broadcast_timeslice( + discount_factor( + years - year + 1, + 
interest_rate=techs.interest_rate, + mask=years <= year + life, + ) ) # Filters @@ -121,8 +123,9 @@ def net_present_value( fixed_costs = distribute_timeslice( techs.fix_par * (capacity**techs.fix_exp), ) - variable_costs = techs.var_par * ( - (production.sel(commodity=products).sum("commodity")) ** techs.var_exp + variable_costs = broadcast_timeslice(techs.var_par) * ( + (production.sel(commodity=products).sum("commodity")) + ** broadcast_timeslice(techs.var_exp) ) assert set(fixed_costs.dims) == set(variable_costs.dims) fixed_and_variable_costs = ((fixed_costs + variable_costs) * rates).sum("year") @@ -196,7 +199,7 @@ def equivalent_annual_cost( """ npc = net_present_cost(technologies, prices, capacity, production, year) crf = capital_recovery_factor(technologies) - return npc * crf + return npc * broadcast_timeslice(crf) def lifetime_levelized_cost_of_energy( @@ -220,6 +223,8 @@ def lifetime_levelized_cost_of_energy( Return: xr.DataArray with the LCOE calculated for the relevant technologies """ + from muse.timeslices import broadcast_timeslice, distribute_timeslice + techs = technologies[ [ "technical_life", @@ -243,10 +248,12 @@ def lifetime_levelized_cost_of_energy( years = xr.DataArray(iyears, coords={"year": iyears}, dims="year") # Evolution of rates with time - rates = discount_factor( - years=years - year + 1, - interest_rate=techs.interest_rate, - mask=years <= year + life, + rates = broadcast_timeslice( + discount_factor( + years=years - year + 1, + interest_rate=techs.interest_rate, + mask=years <= year + life, + ) ) # Filters @@ -282,7 +289,8 @@ def lifetime_levelized_cost_of_energy( techs.fix_par * (capacity**techs.fix_exp), ) variable_costs = ( - techs.var_par * production.sel(commodity=products) ** techs.var_exp + broadcast_timeslice(techs.var_par) + * production.sel(commodity=products) ** broadcast_timeslice(techs.var_exp) ).sum("commodity") fixed_and_variable_costs = ((fixed_costs + variable_costs) * rates).sum("year") denominator = 
production.where(production > 0.0, 1e-6) @@ -364,14 +372,14 @@ def annual_levelized_cost_of_energy( rates = techs.interest_rate / (1 - (1 + techs.interest_rate) ** (-life)) # Capital costs - annualized_capital_costs = ( - distribute_timeslice(techs.cap_par * rates) / techs.utilization_factor - ) + annualized_capital_costs = distribute_timeslice( + techs.cap_par * rates + ) / broadcast_timeslice(techs.utilization_factor) # Fixed and variable running costs - o_and_e_costs = ( - distribute_timeslice(techs.fix_par + techs.var_par) / techs.utilization_factor - ) + o_and_e_costs = distribute_timeslice( + techs.fix_par + techs.var_par + ) / broadcast_timeslice(techs.utilization_factor) # Fuel costs from fixed and flexible inputs fuel_costs = (distribute_timeslice(techs.fixed_inputs) * prices).sum("commodity") diff --git a/src/muse/decisions.py b/src/muse/decisions.py index b75b2a446..e24a3a9c9 100644 --- a/src/muse/decisions.py +++ b/src/muse/decisions.py @@ -117,7 +117,9 @@ def mean(objectives: Dataset, *args, **kwargs) -> DataArray: from xarray import concat allobjectives = concat(objectives.data_vars.values(), dim="concat_var") - return allobjectives.mean(set(allobjectives.dims) - {"asset", "replacement"}) + return allobjectives.mean( + set(allobjectives.dims) - {"asset", "replacement", "timeslice"} + ) @register_decision diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index d36719a29..9a672cb01 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -488,7 +488,9 @@ def _inner_split( # Calculates the demand divided by the number of assets times the number of agents # if the demand is bigger than zero and the total demand assigned with the "method" # function is zero. 
- unassigned = (demand / (len(shares) * len(summed_shares))).where( + n_agents = len(quantity) + n_assets = summed_shares.sizes["asset"] + unassigned = (demand / (n_agents * n_assets)).where( logical_and(demand > 1e-12, total <= 1e-12), 0 ) diff --git a/src/muse/investments.py b/src/muse/investments.py index 87ab26ce9..67ac780d5 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -241,10 +241,6 @@ def adhoc_match_demand( # Push disabled techs to last rank. # Any production assigned to them by the demand-matching algorithm will be removed. - - if "timeslice" in costs.dims: - costs = costs.mean("timeslice").mean("asset") # timeslice_op(costs) - minobj = costs.min() maxobj = costs.where(search_space, minobj).max("replacement") + 1 @@ -388,6 +384,6 @@ def default_to_scipy(): def timeslice_op(x: xr.DataArray) -> xr.DataArray: - from muse.timeslices import TIMESLICE + from muse.timeslices import TIMESLICE, broadcast_timeslice - return (x / (TIMESLICE / sum(TIMESLICE))).max("timeslice") + return (x / (TIMESLICE / broadcast_timeslice(TIMESLICE.sum()))).max("timeslice") diff --git a/src/muse/mca.py b/src/muse/mca.py index 1e56a2d82..850e7673e 100644 --- a/src/muse/mca.py +++ b/src/muse/mca.py @@ -41,6 +41,7 @@ def factory(cls, settings: str | Path | Mapping | Any) -> MCA: from muse.outputs.mca import factory as ofactory from muse.readers import read_settings from muse.readers.toml import convert + from muse.timeslices import drop_timeslice if isinstance(settings, (str, Path)): settings = read_settings(settings) # type: ignore @@ -275,6 +276,8 @@ def run(self) -> None: from xarray import DataArray + from muse.timeslices import broadcast_timeslice + nyear = len(self.time_framework) - 1 check_carbon_budget = len(self.carbon_budget) and len(self.carbon_commodities) shoots = self.control_undershoot or self.control_overshoot @@ -295,7 +298,7 @@ def run(self) -> None: new_market.prices.loc[dict(commodity=self.carbon_commodities)] = ( future_propagation( 
new_market.prices.sel(commodity=self.carbon_commodities), - future_price, + broadcast_timeslice(future_price), ) ) self.carbon_price = future_propagation(self.carbon_price, future_price) @@ -359,6 +362,7 @@ def single_year_iteration( from copy import deepcopy from muse.commodities import is_enduse + from muse.timeslices import drop_timeslice sectors = deepcopy(sectors) market = market.copy(deep=True) diff --git a/src/muse/objectives.py b/src/muse/objectives.py index e0a67e017..5e51d7ef0 100644 --- a/src/muse/objectives.py +++ b/src/muse/objectives.py @@ -133,12 +133,16 @@ def objectives( *args, **kwargs, ) -> xr.Dataset: + from muse.timeslices import broadcast_timeslice + result = xr.Dataset() for name, objective in functions: obj = objective( technologies=technologies, demand=demand, prices=prices, *args, **kwargs ) - if "timeslice" in obj.dims and "timeslice" in result.dims: + if "timeslice" not in obj.dims: + obj = broadcast_timeslice(obj) + if "timeslice" in result.dims: obj = drop_timeslice(obj) result[name] = obj return result @@ -274,7 +278,9 @@ def fixed_costs( :math:`\alpha` and :math:`\beta` are "fix_par" and "fix_exp" in :ref:`inputs-technodata`, respectively. """ - capacity = capacity_to_service_demand(technologies, demand) + from muse.quantities import capacity_to_service_demand + + capacity = capacity_to_service_demand(technologies=technologies, demand=demand) result = technologies.fix_par * (capacity**technologies.fix_exp) return result @@ -386,13 +392,14 @@ def lifetime_levelized_cost_of_energy( due to a zero utilisation factor. 
""" from muse.costs import lifetime_levelized_cost_of_energy as LCOE - from muse.timeslices import distribute_timeslice + from muse.quantities import capacity_to_service_demand + from muse.timeslices import broadcast_timeslice, distribute_timeslice - capacity = capacity_to_service_demand(technologies, demand) + capacity = capacity_to_service_demand(technologies=technologies, demand=demand) production = ( - capacity + broadcast_timeslice(capacity) * distribute_timeslice(technologies.fixed_outputs) - * technologies.utilization_factor + * broadcast_timeslice(technologies.utilization_factor) ) results = LCOE( @@ -419,13 +426,14 @@ def net_present_value( See :py:func:`muse.costs.net_present_value` for more details. """ from muse.costs import net_present_value as NPV - from muse.timeslices import distribute_timeslice + from muse.quantities import capacity_to_service_demand + from muse.timeslices import broadcast_timeslice, distribute_timeslice - capacity = capacity_to_service_demand(technologies, demand) + capacity = capacity_to_service_demand(technologies=technologies, demand=demand) production = ( - capacity + broadcast_timeslice(capacity) * distribute_timeslice(technologies.fixed_outputs) - * technologies.utilization_factor + * broadcast_timeslice(technologies.utilization_factor) ) results = NPV( @@ -451,13 +459,14 @@ def net_present_cost( See :py:func:`muse.costs.net_present_cost` for more details. 
""" from muse.costs import net_present_cost as NPC - from muse.timeslices import distribute_timeslice + from muse.quantities import capacity_to_service_demand + from muse.timeslices import broadcast_timeslice, distribute_timeslice - capacity = capacity_to_service_demand(technologies, demand) + capacity = capacity_to_service_demand(technologies=technologies, demand=demand) production = ( - capacity + broadcast_timeslice(capacity) * distribute_timeslice(technologies.fixed_outputs) - * technologies.utilization_factor + * broadcast_timeslice(technologies.utilization_factor) ) results = NPC( @@ -483,13 +492,14 @@ def equivalent_annual_cost( See :py:func:`muse.costs.equivalent_annual_cost` for more details. """ from muse.costs import equivalent_annual_cost as EAC - from muse.timeslices import distribute_timeslice + from muse.quantities import capacity_to_service_demand + from muse.timeslices import broadcast_timeslice, distribute_timeslice - capacity = capacity_to_service_demand(technologies, demand) + capacity = capacity_to_service_demand(technologies=technologies, demand=demand) production = ( - capacity + broadcast_timeslice(capacity) * distribute_timeslice(technologies.fixed_outputs) - * technologies.utilization_factor + * broadcast_timeslice(technologies.utilization_factor) ) results = EAC( diff --git a/src/muse/quantities.py b/src/muse/quantities.py index 2d836ab0f..e172099cf 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -44,6 +44,7 @@ def supply( input commodities). 
""" from muse.commodities import CommodityUsage, check_usage, is_pollutant + from muse.timeslices import broadcast_timeslice if production_method is None: production_method = maximum_production @@ -88,8 +89,12 @@ def supply( demsum = set(maxprod.dims).difference(demand.dims) expanded_demand = (demand * maxprod / maxprod.sum(demsum)).fillna(0) - expanded_maxprod = (maxprod * demand / demand.sum(prodsum)).fillna(0) - expanded_minprod = (minprod * demand / demand.sum(prodsum)).fillna(0) + expanded_maxprod = ( + maxprod * demand / broadcast_timeslice(demand.sum(prodsum)) + ).fillna(0) + expanded_minprod = ( + minprod * demand / broadcast_timeslice(demand.sum(prodsum)) + ).fillna(0) expanded_demand = expanded_demand.reindex_like(maxprod) expanded_minprod = expanded_minprod.reindex_like(maxprod) @@ -125,6 +130,7 @@ def emission(production: xr.DataArray, fixed_outputs: xr.DataArray): A data array containing emissions (and only emissions). """ from muse.commodities import is_enduse, is_pollutant + from muse.timeslices import broadcast_timeslice from muse.utilities import broadcast_techs # just in case we are passed a technologies dataset, like in other functions @@ -133,8 +139,8 @@ def emission(production: xr.DataArray, fixed_outputs: xr.DataArray): ) envs = is_pollutant(fouts.comm_usage) enduses = is_enduse(fouts.comm_usage) - return production.sel(commodity=enduses).sum("commodity") * fouts.sel( - commodity=envs + return production.sel(commodity=enduses).sum("commodity") * broadcast_timeslice( + fouts.sel(commodity=envs) ) @@ -273,6 +279,7 @@ def consumption( are not given, then flexible consumption is *not* considered. 
""" from muse.commodities import is_enduse, is_fuel + from muse.timeslices import broadcast_timeslice from muse.utilities import filter_with_template params = filter_with_template( @@ -285,7 +292,9 @@ def consumption( production = production.sel(commodity=is_enduse(comm_usage)).sum("commodity") params_fuels = is_fuel(params.comm_usage) - consumption = production * params.fixed_inputs.where(params_fuels, 0) + consumption = production * broadcast_timeslice( + params.fixed_inputs.where(params_fuels, 0) + ) if prices is None: return consumption @@ -306,7 +315,7 @@ def consumption( ] # add consumption from cheapest fuel assert all(flexs.commodity.values == consumption.commodity.values) - flex = flexs.where(minprices == flexs.commodity, 0) + flex = flexs.where(minprices == broadcast_timeslice(flexs.commodity), 0) flex = flex / (flex > 0).sum("commodity").clip(min=1) return consumption + flex * production @@ -349,7 +358,7 @@ def maximum_production( filters and the set of technologies in `capacity`. """ from muse.commodities import is_enduse - from muse.timeslices import distribute_timeslice + from muse.timeslices import broadcast_timeslice, distribute_timeslice from muse.utilities import broadcast_techs, filter_input capa = filter_input( @@ -362,7 +371,9 @@ def maximum_production( btechs, **{k: v for k, v in filters.items() if k in btechs.dims} ) result = ( - capa * distribute_timeslice(ftechs.fixed_outputs) * ftechs.utilization_factor + broadcast_timeslice(capa) + * distribute_timeslice(ftechs.fixed_outputs) + * broadcast_timeslice(ftechs.utilization_factor) ) return result.where(is_enduse(result.comm_usage), 0) @@ -421,6 +432,7 @@ def capacity_in_use( Capacity-in-use for each technology, whittled down by the filters. 
""" from muse.commodities import is_enduse + from muse.timeslices import broadcast_timeslice from muse.utilities import broadcast_techs, filter_input prod = filter_input( @@ -435,7 +447,7 @@ def capacity_in_use( ) factor = 1 / (ftechs.fixed_outputs * ftechs.utilization_factor) - capa_in_use = (prod * factor).where(~np.isinf(factor), 0) + capa_in_use = (prod * broadcast_timeslice(factor)).where(~np.isinf(factor), 0) capa_in_use = capa_in_use.where( is_enduse(technologies.comm_usage.sel(commodity=capa_in_use.commodity)), 0 @@ -460,6 +472,7 @@ def costed_production( service is applied first. """ from muse.quantities import maximum_production + from muse.timeslices import broadcast_timeslice from muse.utilities import broadcast_techs technodata = cast(xr.Dataset, broadcast_techs(technologies, capacity)) @@ -492,9 +505,13 @@ def group_assets(x: xr.DataArray) -> xr.DataArray: if not with_minimum_service: production = xr.zeros_like(constraints.maxprod) else: - production = ( - getattr(technodata, "minimum_service_factor", 0) * constraints.maxprod - ) + if hasattr(technodata, "minimum_service_factor"): + production = ( + broadcast_timeslice(technodata.minimum_service_factor) + * constraints.maxprod + ) + else: + production = 0 * constraints.maxprod demand = np.maximum(demand - group_assets(production), 0) for rank in sorted(set(constraints.ranking.values.flatten())): @@ -560,7 +577,7 @@ def minimum_production( the filters and the set of technologies in `capacity`. 
""" from muse.commodities import is_enduse - from muse.timeslices import distribute_timeslice + from muse.timeslices import broadcast_timeslice, distribute_timeslice from muse.utilities import broadcast_techs, filter_input capa = filter_input( @@ -568,7 +585,7 @@ def minimum_production( ) if "minimum_service_factor" not in technologies: - return xr.zeros_like(capa) + return broadcast_timeslice(xr.zeros_like(capa)) btechs = broadcast_techs( # type: ignore cast( @@ -581,9 +598,9 @@ def minimum_production( btechs, **{k: v for k, v in filters.items() if k in btechs.dims} ) result = ( - capa + broadcast_timeslice(capa) * distribute_timeslice(ftechs.fixed_outputs) - * ftechs.minimum_service_factor + * broadcast_timeslice(ftechs.minimum_service_factor) ) return result.where(is_enduse(result.comm_usage), 0) @@ -593,11 +610,10 @@ def capacity_to_service_demand( technologies: xr.Dataset, ) -> xr.DataArray: """Minimum capacity required to fulfill the demand.""" - from muse.timeslices import distribute_timeslice + from muse.timeslices import broadcast_timeslice, distribute_timeslice - timeslice_outputs = ( - distribute_timeslice(technologies.fixed_outputs.sel(commodity=demand.commodity)) - * technologies.utilization_factor - ) + timeslice_outputs = distribute_timeslice( + technologies.fixed_outputs.sel(commodity=demand.commodity) + ) * broadcast_timeslice(technologies.utilization_factor) capa_to_service_demand = demand / timeslice_outputs return capa_to_service_demand.max(("commodity", "timeslice")) diff --git a/src/muse/readers/csv.py b/src/muse/readers/csv.py index 91936a30b..5438dfa4b 100644 --- a/src/muse/readers/csv.py +++ b/src/muse/readers/csv.py @@ -450,7 +450,6 @@ def read_global_commodities(path: Union[str, Path]) -> xr.Dataset: def read_timeslice_shares( path: Union[str, Path] = DEFAULT_SECTORS_DIRECTORY, sector: Optional[str] = None, - timeslice: Union[str, Path, xr.DataArray] = "Timeslices{sector}.csv", ) -> xr.Dataset: """Reads sliceshare information into a 
xr.Dataset. @@ -469,10 +468,6 @@ def read_timeslice_shares( path, filename = path.parent, path.name re = match(r"TimesliceShare(.*)\.csv", filename) sector = path.name if re is None else re.group(1) - if isinstance(timeslice, str) and "{sector}" in timeslice: - timeslice = timeslice.format(sector=sector) - if isinstance(timeslice, (str, Path)) and not Path(timeslice).is_file(): - timeslice = find_sectors_file(timeslice, sector, path) share_path = find_sectors_file(f"TimesliceShare{sector}.csv", sector, path) getLogger(__name__).info(f"Reading timeslice shares from {share_path}") @@ -485,13 +480,6 @@ def read_timeslice_shares( data.columns.name = "commodity" result = xr.DataArray(data).unstack("rt").to_dataset(name="shares") - - if timeslice is None: - result = result.drop_vars("timeslice") - elif isinstance(timeslice, xr.DataArray) and hasattr(timeslice, "timeslice"): - result["timeslice"] = timeslice.timeslice - else: - result["timeslice"] = timeslice return result.shares diff --git a/src/muse/sectors/preset_sector.py b/src/muse/sectors/preset_sector.py index 116fece7e..03bf20080 100644 --- a/src/muse/sectors/preset_sector.py +++ b/src/muse/sectors/preset_sector.py @@ -30,7 +30,7 @@ def factory(cls, name: str, settings: Any) -> PresetSector: read_timeslice_shares, ) from muse.regressions import endogenous_demand - from muse.timeslices import TIMESLICE, distribute_timeslice + from muse.timeslices import TIMESLICE, broadcast_timeslice, distribute_timeslice sector_conf = getattr(settings.sectors, name) presets = Dataset() @@ -68,22 +68,14 @@ def factory(cls, name: str, settings: Any) -> PresetSector: if getattr(sector_conf, "timeslice_shares_path", None) is not None: assert isinstance(timeslice, DataArray) - shares = read_timeslice_shares( - sector_conf.timeslice_shares_path, timeslice=timeslice - ) + shares = read_timeslice_shares(sector_conf.timeslice_shares_path) + shares = shares.assign_coords(timeslice=timeslice) assert 
consumption.commodity.isin(shares.commodity).all() assert consumption.region.isin(shares.region).all() - if "timeslice" in shares.dims: - ts = shares.timeslice - shares = drop_timeslice(shares) - consumption = (shares * consumption).assign_coords(timeslice=ts) - else: - consumption = consumption * shares.sel( - region=consumption.region, commodity=consumption.commodity - ) - presets["consumption"] = drop_timeslice(consumption).assign_coords( - timeslice=timeslice - ) + consumption = broadcast_timeslice(consumption) * shares.sel( + region=consumption.region, commodity=consumption.commodity + ) + presets["consumption"] = consumption if getattr(sector_conf, "supply_path", None) is not None: supply = read_presets(sector_conf.supply_path) diff --git a/tests/conftest.py b/tests/conftest.py index 9984599a5..bb58727a5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,12 +1,14 @@ from collections.abc import Mapping, Sequence from pathlib import Path from typing import Callable, Optional +from unittest.mock import patch import numpy as np from pandas import DataFrame from pytest import fixture, mark from xarray import DataArray, Dataset +from muse.__main__ import patched_broadcast_compat_data from muse.agents import Agent @@ -19,6 +21,14 @@ def logger(): return logger +@fixture(autouse=True) +def patch_broadcast_compat_data(): + with patch( + "xarray.core.variable._broadcast_compat_data", patched_broadcast_compat_data + ): + yield + + @fixture(scope="session") def cases_directory() -> Optional[Path]: try: diff --git a/tests/test_costs.py b/tests/test_costs.py index 11cf46df6..1099e2312 100644 --- a/tests/test_costs.py +++ b/tests/test_costs.py @@ -19,12 +19,12 @@ def _capacity(technologies, demand_share): @fixture def _production(technologies, _capacity): - from muse.timeslices import distribute_timeslice + from muse.timeslices import broadcast_timeslice, distribute_timeslice production = ( - _capacity + broadcast_timeslice(_capacity) * 
distribute_timeslice(technologies.fixed_outputs) - * technologies.utilization_factor + * broadcast_timeslice(technologies.utilization_factor) ) return production diff --git a/tests/test_quantities.py b/tests/test_quantities.py index a451c8e6f..f5db481d1 100644 --- a/tests/test_quantities.py +++ b/tests/test_quantities.py @@ -31,14 +31,14 @@ def production( ) -> xr.DataArray: from numpy.random import random - from muse.timeslices import distribute_timeslice + from muse.timeslices import broadcast_timeslice, distribute_timeslice comms = xr.DataArray( random(len(technologies.commodity)), coords={"commodity": technologies.commodity}, dims="commodity", ) - return capacity * distribute_timeslice(comms) + return broadcast_timeslice(capacity) * distribute_timeslice(comms) def make_array(array): @@ -144,6 +144,7 @@ def test_decommissioning_demand(technologies, capacity, timeslice): def test_consumption_no_flex(technologies, production, market): from muse.commodities import is_enduse, is_fuel from muse.quantities import consumption + from muse.timeslices import broadcast_timeslice fins = ( technologies.fixed_inputs.where(is_fuel(technologies.comm_usage), 0) @@ -156,7 +157,7 @@ def test_consumption_no_flex(technologies, production, market): ) services = technologies.commodity.sel(commodity=is_enduse(technologies.comm_usage)) expected = ( - (production.rename(commodity="comm_in") * fins) + (production.rename(commodity="comm_in") * broadcast_timeslice(fins)) .sel(comm_in=production.commodity.isin(services).rename(commodity="comm_in")) .sum("comm_in") ) @@ -175,7 +176,7 @@ def test_consumption_with_flex(technologies, production, market, timeslice): from muse.commodities import is_enduse, is_fuel from muse.quantities import consumption - from muse.timeslices import distribute_timeslice + from muse.timeslices import broadcast_timeslice, distribute_timeslice techs = technologies.copy() techs.fixed_inputs[:] = 0 @@ -201,7 +202,11 @@ def one_dim(dimension): timeslice = 
one_dim(market.timeslice) commodity = one_dim(market.commodity) - prices = timeslice + commodity + year * region + prices = ( + timeslice + + broadcast_timeslice(commodity) + + broadcast_timeslice(year) * broadcast_timeslice(region) + ) assert set(prices.dims) == set(market.prices.dims) noenduse = ~is_enduse(techs.comm_usage) production = distribute_timeslice(asset * year + commodity) @@ -543,6 +548,7 @@ def test_costed_production_with_minimum_service(market, capacity, technologies, costed_production, maximum_production, ) + from muse.timeslices import broadcast_timeslice from muse.utilities import broadcast_techs if set(capacity.region.values) != set(market.region.values): @@ -554,7 +560,9 @@ def test_costed_production_with_minimum_service(market, capacity, technologies, rng.uniform(low=0.5, high=0.9, size=technologies.utilization_factor.shape), ) maxprod = maximum_production(technologies, capacity) - minprod = maxprod * broadcast_techs(technologies.minimum_service_factor, maxprod) + minprod = maxprod * broadcast_timeslice( + broadcast_techs(technologies.minimum_service_factor, maxprod) + ) maxdemand = xr.Dataset(dict(mp=minprod)).groupby("region").sum("asset").mp market["consumption"] = drop_timeslice(maxdemand * 0.9) technodata = broadcast_techs(technologies, capacity) From 366c37cef00ec0a6750670bc564b796564fe682a Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 1 Nov 2024 15:57:56 +0000 Subject: [PATCH 56/92] Drop convert_market_timeslice --- src/muse/sectors/sector.py | 22 ---------------------- 1 file changed, 22 deletions(-) diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index 9d10165cf..1b1f81de8 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -265,7 +265,6 @@ def group_assets(x: xr.DataArray) -> xr.DataArray: result = xr.Dataset( dict(supply=supply, consumption=consumption, costs=costs) ) - result = self.convert_market_timeslice(result, mca_market.timeslice) result["comm_usage"] = 
self.technologies.comm_usage.sel( commodity=result.commodity ) @@ -376,24 +375,3 @@ def agents(self) -> Iterator[AbstractAgent]: """Iterator over all agents in the sector.""" for subsector in self.subsectors: yield from subsector.agents - - @staticmethod - def convert_market_timeslice( - market: xr.Dataset, - timeslice: pd.MultiIndex, - intensive: str | tuple[str] = "prices", - ) -> xr.Dataset: - """Converts market from one to another timeslice.""" - from muse.timeslices import broadcast_timeslice - - if isinstance(intensive, str): - intensive = (intensive,) - - timesliced = {d for d in market.data_vars if "timeslice" in market[d].dims} - - intensives = market[list(timesliced.intersection(intensive))] - if "timeslice" not in intensives.dims: - intensives = broadcast_timeslice(intensives) - extensives = market[list(timesliced.difference(intensives.data_vars))] - others = market[list(set(market.data_vars).difference(timesliced))] - return xr.merge([intensives, extensives, others]) From 908be7be4c9f021bf32e193d9a2e7f243c5155b7 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 4 Nov 2024 14:44:48 +0000 Subject: [PATCH 57/92] Remove timeslice attribute from sectors --- src/muse/sectors/sector.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index 1b1f81de8..7037eeca4 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -7,7 +7,6 @@ cast, ) -import pandas as pd import xarray as xr from muse.agents import AbstractAgent @@ -27,7 +26,6 @@ def factory(cls, name: str, settings: Any) -> Sector: from muse.outputs.sector import factory as ofactory from muse.production import factory as pfactory from muse.readers.toml import read_technodata - from muse.timeslices import TIMESLICE from muse.utilities import nametuple_to_dict # Read sector settings @@ -39,9 +37,6 @@ def factory(cls, name: str, settings: Any) -> Sector: if len(sector_settings["subsectors"]._asdict()) == 0: raise 
RuntimeError(f"Empty 'subsectors' section in sector {name}") - # Timeslices - timeslices = TIMESLICE.timeslice - # Read technologies technologies = read_technodata(settings, name, settings.time_framework) @@ -93,7 +88,6 @@ def factory(cls, name: str, settings: Any) -> Sector: name, technologies, subsectors=subsectors, - timeslices=timeslices, supply_prod=supply, outputs=outputs, interactions=interactions, @@ -105,7 +99,6 @@ def __init__( name: str, technologies: xr.Dataset, subsectors: Sequence[Subsector] = [], - timeslices: pd.MultiIndex | None = None, interactions: Callable[[Sequence[AbstractAgent]], None] | None = None, interpolation: str = "linear", outputs: Callable | None = None, @@ -121,10 +114,6 @@ def __init__( """Subsectors controlled by this object.""" self.technologies: xr.Dataset = technologies """Parameters describing the sector's technologies.""" - self.timeslices: pd.MultiIndex | None = timeslices - """Timeslice at which this sector operates. - If None, it will operate using the timeslice of the input market. 
- """ self.interpolation: Mapping[str, Any] = { "method": interpolation, "kwargs": {"fill_value": "extrapolate"}, From 15ebf845d0acfc84573244c38122bb138499b924 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 4 Nov 2024 20:55:37 +0000 Subject: [PATCH 58/92] Add dummy timeslice_level argument --- src/muse/agents/agent.py | 8 +++- src/muse/constraints.py | 56 +++++++++++++++++------ src/muse/costs.py | 58 +++++++++++++++--------- src/muse/demand_share.py | 33 ++++++++++++-- src/muse/investments.py | 9 +++- src/muse/objectives.py | 45 ++++++++++++------- src/muse/production.py | 28 +++++++++--- src/muse/quantities.py | 84 ++++++++++++++++++++++++----------- src/muse/readers/csv.py | 4 +- src/muse/sectors/sector.py | 14 ++++-- src/muse/sectors/subsector.py | 2 + src/muse/timeslices.py | 11 +++-- 12 files changed, 255 insertions(+), 97 deletions(-) diff --git a/src/muse/agents/agent.py b/src/muse/agents/agent.py index 7c674e7d7..453e571cd 100644 --- a/src/muse/agents/agent.py +++ b/src/muse/agents/agent.py @@ -217,6 +217,7 @@ def __init__( """ self.asset_threshold = asset_threshold """Threshold below which assets are not added.""" + self.timeslice_level = "" @property def forecast_year(self): @@ -273,7 +274,10 @@ def compute_decision( # Compute the objectives objectives = self.objectives( - technologies=techs, demand=reduced_demand, prices=prices + technologies=techs, + demand=reduced_demand, + prices=prices, + timeslice_level=self.timeslice_level, ) # Compute the decision metric @@ -369,6 +373,7 @@ def next( market, technologies, year=current_year, + timeslice_level=self.timeslice_level, ) # Calculate investments @@ -377,6 +382,7 @@ def next( technologies, constraints, year=current_year, + timeslice_level=self.timeslice_level, ) # Add investments diff --git a/src/muse/constraints.py b/src/muse/constraints.py index bb261a0ae..e77f3a60c 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -248,11 +248,20 @@ def constraints( market: xr.Dataset, 
technologies: xr.Dataset, year: int | None = None, + timeslice_level: str | None = None, ) -> list[Constraint]: if year is None: year = int(market.year.min()) constraints = [ - function(demand, assets, search_space, market, technologies, year=year) + function( + demand, + assets, + search_space, + market, + technologies, + year=year, + timeslice_level=timeslice_level, + ) for function in constraint_closures ] return [constraint for constraint in constraints if constraint is not None] @@ -270,6 +279,7 @@ def max_capacity_expansion( year: int | None = None, forecast: int | None = None, interpolation: str = "linear", + **kwargs, ) -> Constraint: r"""Max-capacity addition, max-capacity growth, and capacity limits constraints. @@ -395,6 +405,7 @@ def demand( year: int | None = None, forecast: int = 5, interpolation: str = "linear", + **kwargs, ) -> Constraint: """Constraints production to meet demand.""" from muse.commodities import is_enduse @@ -418,6 +429,7 @@ def search_space( technologies: xr.Dataset, year: int | None = None, forecast: int = 5, + **kwargs, ) -> Constraint | None: """Removes disabled technologies.""" if search_space.all(): @@ -437,6 +449,8 @@ def max_production( market: xr.Dataset, technologies: xr.Dataset, year: int | None = None, + timeslice_level: str | None = None, + **kwargs, ) -> Constraint: """Constructs constraint between capacity and maximum production. 
@@ -465,9 +479,9 @@ def max_production( .sel(**kwargs) .drop_vars("technology") ) - capacity = distribute_timeslice(techs.fixed_outputs) * broadcast_timeslice( - techs.utilization_factor - ) + capacity = distribute_timeslice( + techs.fixed_outputs, level=timeslice_level + ) * broadcast_timeslice(techs.utilization_factor, level=timeslice_level) if "asset" not in capacity.dims and "asset" in search_space.dims: capacity = capacity.expand_dims(asset=search_space.asset) production = ones_like(capacity) @@ -484,8 +498,8 @@ def max_production( maxadd = maxadd.rename(technology="replacement") maxadd = maxadd.where(maxadd == 0, 0.0) maxadd = maxadd.where(maxadd > 0, -1.0) - capacity = capacity * broadcast_timeslice(maxadd) - production = production * broadcast_timeslice(maxadd) + capacity = capacity * broadcast_timeslice(maxadd, level=timeslice_level) + production = production * broadcast_timeslice(maxadd, level=timeslice_level) b = b.rename(region="src_region") return xr.Dataset( dict(capacity=-cast(np.ndarray, capacity), production=production, b=b), @@ -501,6 +515,8 @@ def demand_limiting_capacity( market: xr.Dataset, technologies: xr.Dataset, year: int | None = None, + timeslice_level: str | None = None, + **kwargs, ) -> Constraint: """Limits the maximum combined capacity to match the demand. 
@@ -516,7 +532,13 @@ def demand_limiting_capacity( """ # We start with the maximum production constraint and the demand constraint capacity_constraint = max_production( - demand_, assets, search_space, market, technologies, year=year + demand_, + assets, + search_space, + market, + technologies, + year=year, + timeslice_level=timeslice_level, ) demand_constraint = demand( demand_, assets, search_space, market, technologies, year=year @@ -709,6 +731,8 @@ def minimum_service( market: xr.Dataset, technologies: xr.Dataset, year: int | None = None, + timeslice_level: str | None = None, + **kwargs, ) -> Constraint | None: """Constructs constraint between capacity and minimum service.""" from xarray import ones_like, zeros_like @@ -737,9 +761,9 @@ def minimum_service( .sel(**kwargs) .drop_vars("technology") ) - capacity = distribute_timeslice(techs.fixed_outputs) * broadcast_timeslice( - techs.minimum_service_factor - ) + capacity = distribute_timeslice( + techs.fixed_outputs, level=timeslice_level + ) * broadcast_timeslice(techs.minimum_service_factor, level=timeslice_level) if "asset" not in capacity.dims: capacity = capacity.expand_dims(asset=search_space.asset) production = ones_like(capacity) @@ -750,7 +774,9 @@ def minimum_service( ) -def lp_costs(technologies: xr.Dataset, costs: xr.DataArray) -> xr.Dataset: +def lp_costs( + technologies: xr.Dataset, costs: xr.DataArray, timeslice_level: str | None = None +) -> xr.Dataset: """Creates costs for solving with scipy's LP solver. 
Example: @@ -813,7 +839,10 @@ def lp_costs(technologies: xr.Dataset, costs: xr.DataArray) -> xr.Dataset: selection["region"] = costs.region fouts = technologies.fixed_outputs.sel(selection).rename(technology="replacement") - production = zeros_like(broadcast_timeslice(costs) * distribute_timeslice(fouts)) + production = zeros_like( + broadcast_timeslice(costs, level=timeslice_level) + * distribute_timeslice(fouts, level=timeslice_level) + ) for dim in production.dims: if isinstance(production.get_index(dim), pd.MultiIndex): production = drop_timeslice(production) @@ -1166,8 +1195,9 @@ def factory( technologies: xr.Dataset, costs: xr.DataArray, *constraints: Constraint, + timeslice_level: str | None = None, ) -> ScipyAdapter: - lpcosts = lp_costs(technologies, costs) + lpcosts = lp_costs(technologies, costs, timeslice_level=timeslice_level) data = cls._unified_dataset(technologies, lpcosts, *constraints) diff --git a/src/muse/costs.py b/src/muse/costs.py index bc35f2ad4..90c43388e 100644 --- a/src/muse/costs.py +++ b/src/muse/costs.py @@ -23,6 +23,7 @@ def net_present_value( capacity: xr.DataArray, production: xr.DataArray, year: int, + timeslice_level: str | None = None, ) -> xr.DataArray: """Net present value (NPV) of the relevant technologies. 
@@ -84,7 +85,8 @@ def net_present_value( years - year + 1, interest_rate=techs.interest_rate, mask=years <= year + life, - ) + ), + level=timeslice_level, ) # Filters @@ -99,7 +101,7 @@ def net_present_value( # Cost of installed capacity installed_capacity_costs = distribute_timeslice( - techs.cap_par * (capacity**techs.cap_exp), + techs.cap_par * (capacity**techs.cap_exp), level=timeslice_level ) # Cost related to environmental products @@ -121,11 +123,11 @@ def net_present_value( # Fixed and Variable costs fixed_costs = distribute_timeslice( - techs.fix_par * (capacity**techs.fix_exp), + techs.fix_par * (capacity**techs.fix_exp), level=timeslice_level ) - variable_costs = broadcast_timeslice(techs.var_par) * ( + variable_costs = broadcast_timeslice(techs.var_par, level=timeslice_level) * ( (production.sel(commodity=products).sum("commodity")) - ** broadcast_timeslice(techs.var_exp) + ** broadcast_timeslice(techs.var_exp, level=timeslice_level) ) assert set(fixed_costs.dims) == set(variable_costs.dims) fixed_and_variable_costs = ((fixed_costs + variable_costs) * rates).sum("year") @@ -176,6 +178,7 @@ def equivalent_annual_cost( capacity: xr.DataArray, production: xr.DataArray, year: int, + timeslice_level: str | None = None, ) -> xr.DataArray: """Equivalent annual costs (or annualized cost) of a technology. @@ -199,7 +202,7 @@ def equivalent_annual_cost( """ npc = net_present_cost(technologies, prices, capacity, production, year) crf = capital_recovery_factor(technologies) - return npc * broadcast_timeslice(crf) + return npc * broadcast_timeslice(crf, level=timeslice_level) def lifetime_levelized_cost_of_energy( @@ -208,6 +211,7 @@ def lifetime_levelized_cost_of_energy( capacity: xr.DataArray, production: xr.DataArray, year: int, + timeslice_level: str | None = None, ) -> xr.DataArray: """Levelized cost of energy (LCOE) of technologies over their lifetime. 
@@ -253,7 +257,8 @@ def lifetime_levelized_cost_of_energy( years=years - year + 1, interest_rate=techs.interest_rate, mask=years <= year + life, - ) + ), + level=timeslice_level, ) # Filters @@ -264,7 +269,7 @@ def lifetime_levelized_cost_of_energy( # Cost of installed capacity installed_capacity_costs = distribute_timeslice( - techs.cap_par * (capacity**techs.cap_exp), + techs.cap_par * (capacity**techs.cap_exp), level=timeslice_level ) # Cost related to environmental products @@ -277,7 +282,12 @@ def lifetime_levelized_cost_of_energy( # Fuel/energy costs prices_fuel = filter_input(prices, commodity=fuels, year=years.values) - fuel = consumption(technologies=techs, production=production, prices=prices) + fuel = consumption( + technologies=techs, + production=production, + prices=prices, + timeslice_level=timeslice_level, + ) fuel_costs = (fuel * prices_fuel * rates).sum(("commodity", "year")) # Cost related to material other than fuel/energy and environmentals @@ -286,11 +296,12 @@ def lifetime_levelized_cost_of_energy( # Fixed and Variable costs fixed_costs = distribute_timeslice( - techs.fix_par * (capacity**techs.fix_exp), + techs.fix_par * (capacity**techs.fix_exp), level=timeslice_level ) variable_costs = ( - broadcast_timeslice(techs.var_par) - * production.sel(commodity=products) ** broadcast_timeslice(techs.var_exp) + broadcast_timeslice(techs.var_par, level=timeslice_level) + * production.sel(commodity=products) + ** broadcast_timeslice(techs.var_exp, level=timeslice_level) ).sum("commodity") fixed_and_variable_costs = ((fixed_costs + variable_costs) * rates).sum("year") denominator = production.where(production > 0.0, 1e-6) @@ -310,6 +321,7 @@ def annual_levelized_cost_of_energy( prices: xr.DataArray, interpolation: str = "linear", fill_value: Union[int, str] = "extrapolate", + timeslice_level: str | None = None, **filters, ) -> xr.DataArray: """Undiscounted levelized cost of energy (LCOE) of technologies on each given year. 
@@ -373,31 +385,33 @@ def annual_levelized_cost_of_energy( # Capital costs annualized_capital_costs = distribute_timeslice( - techs.cap_par * rates - ) / broadcast_timeslice(techs.utilization_factor) + techs.cap_par * rates, level=timeslice_level + ) / broadcast_timeslice(techs.utilization_factor, level=timeslice_level) # Fixed and variable running costs o_and_e_costs = distribute_timeslice( - techs.fix_par + techs.var_par - ) / broadcast_timeslice(techs.utilization_factor) + techs.fix_par + techs.var_par, level=timeslice_level + ) / broadcast_timeslice(techs.utilization_factor, level=timeslice_level) # Fuel costs from fixed and flexible inputs - fuel_costs = (distribute_timeslice(techs.fixed_inputs) * prices).sum("commodity") - fuel_costs += (distribute_timeslice(techs.flexible_inputs) * prices).sum( - "commodity" - ) + fuel_costs = ( + distribute_timeslice(techs.fixed_inputs, level=timeslice_level) * prices + ).sum("commodity") + fuel_costs += ( + distribute_timeslice(techs.flexible_inputs, level=timeslice_level) * prices + ).sum("commodity") # Environmental costs if "region" in techs.dims: env_costs = ( - (distribute_timeslice(techs.fixed_outputs) * prices) + (distribute_timeslice(techs.fixed_outputs, level=timeslice_level) * prices) .sel(region=techs.region) .sel(commodity=is_pollutant(techs.comm_usage)) .sum("commodity") ) else: env_costs = ( - (distribute_timeslice(techs.fixed_outputs) * prices) + (distribute_timeslice(techs.fixed_outputs, level=timeslice_level) * prices) .sel(commodity=is_pollutant(techs.comm_usage)) .sum("commodity") ) diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index 9a672cb01..22806d664 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -115,6 +115,7 @@ def new_and_retro( current_year: int, forecast: int, production: Union[str, Mapping, Callable] = "maximum_production", + timeslice_level: str | None = None, ) -> xr.DataArray: r"""Splits demand across new and retro agents. 
@@ -236,6 +237,7 @@ def decommissioning(capacity): technologies, capacity, year=[current_year, current_year + forecast], + timeslice_level=timeslice_level, ).squeeze("year") capacity = reduce_assets([u.assets.capacity for u in agents]) @@ -247,6 +249,7 @@ def decommissioning(capacity): production=production, current_year=current_year, forecast=forecast, + timeslice_level=timeslice_level, ) demands = demands.where( @@ -311,6 +314,7 @@ def decommissioning(capacity): maximum_production, technologies=regional_techs, year=current_year, + timeslice_level=timeslice_level, ), id_to_nquantity, ) @@ -329,6 +333,7 @@ def standard_demand( current_year: int, forecast: int, production: Union[str, Mapping, Callable] = "maximum_production", + timeslice_level: str | None = None, ) -> xr.DataArray: r"""Splits demand across new agents. @@ -363,6 +368,7 @@ def decommissioning(capacity): technologies, capacity, year=[current_year, current_year + forecast], + timeslice_level=timeslice_level, ).squeeze("year") # Make sure there are no retrofit agents @@ -381,6 +387,7 @@ def decommissioning(capacity): production=production, current_year=current_year, forecast=forecast, + timeslice_level=timeslice_level, ) # Only consider end-use commodities @@ -416,6 +423,7 @@ def decommissioning(capacity): maximum_production, technologies=technologies.sel(region=region), year=current_year, + timeslice_level=timeslice_level, ), id_to_quantity, ) @@ -438,6 +446,7 @@ def unmet_forecasted_demand( current_year: int, forecast: int, production: Union[str, Mapping, Callable] = "maximum_production", + timeslice_level: str | None = None, ) -> xr.DataArray: """Forecast demand that cannot be serviced by non-decommissioned current assets.""" from muse.commodities import is_enduse @@ -448,7 +457,9 @@ def unmet_forecasted_demand( smarket: xr.Dataset = market.where(is_enduse(comm_usage), 0).interp(year=year) capacity = reduce_assets([u.assets.capacity.interp(year=year) for u in agents]) capacity = cast(xr.DataArray, 
capacity) - result = unmet_demand(smarket, capacity, technologies, production) + result = unmet_demand( + smarket, capacity, technologies, production, timeslice_level=timeslice_level + ) if "year" in result.dims: result = result.squeeze("year") return result @@ -511,6 +522,7 @@ def unmet_demand( capacity: xr.DataArray, technologies: xr.Dataset, production: Union[str, Mapping, Callable] = "maximum_production", + timeslice_level: str | None = None, ): r"""Share of the demand that cannot be serviced by the existing assets. @@ -531,7 +543,12 @@ def unmet_demand( assert callable(prod_method) # Calculate production by existing assets - produced = prod_method(market=market, capacity=capacity, technologies=technologies) + produced = prod_method( + market=market, + capacity=capacity, + technologies=technologies, + timeslice_level=timeslice_level, + ) # Total commodity production by summing over assets if "dst_region" in produced.dims: @@ -552,6 +569,7 @@ def new_consumption( technologies: xr.Dataset, current_year: int, forecast: int, + timeslice_level: str | None = None, ) -> xr.DataArray: r"""Computes share of the demand attributed to new agents. @@ -580,7 +598,7 @@ def new_consumption( # Calculate the increase in consumption over the forecast period delta = (forecasted.consumption - current.consumption).clip(min=0) - missing = unmet_demand(current, capa, technologies) + missing = unmet_demand(current, capa, technologies, timeslice_level=timeslice_level) consumption = minimum(delta, missing) return consumption @@ -592,6 +610,7 @@ def new_and_retro_demands( current_year: int, forecast: int, production: Union[str, Mapping, Callable] = "maximum_production", + timeslice_level: str | None = None, ) -> xr.Dataset: """Splits demand into *new* and *retrofit* demand. 
@@ -622,7 +641,12 @@ def new_and_retro_demands( # Calculate demand to allocate to "new" agents new_demand = new_consumption( - capa, smarket, technologies, current_year=current_year, forecast=forecast + capa, + smarket, + technologies, + current_year=current_year, + forecast=forecast, + timeslice_level=timeslice_level, ) if "year" in new_demand.dims: new_demand = new_demand.squeeze("year") @@ -633,6 +657,7 @@ def new_and_retro_demands( smarket.sel(year=current_year + forecast), capa.sel(year=current_year + forecast), technologies, + timeslice_level=timeslice_level, ) .groupby("region") .sum("asset") diff --git a/src/muse/investments.py b/src/muse/investments.py index 67ac780d5..05ed89771 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -269,6 +269,7 @@ def scipy_match_demand( technologies: xr.Dataset, constraints: list[Constraint], year: Optional[int] = None, + timeslice_level: Optional[str] = None, **options, ) -> xr.DataArray: from logging import getLogger @@ -289,7 +290,9 @@ def scipy_match_demand( techs = technologies # Run scipy optimization with highs solver - adapter = ScipyAdapter.factory(techs, cast(np.ndarray, costs), *constraints) + adapter = ScipyAdapter.factory( + techs, cast(np.ndarray, costs), *constraints, timeslice_level=timeslice_level + ) res = linprog(**adapter.kwargs, method="highs") # Backup: try with highs-ipm @@ -386,4 +389,6 @@ def default_to_scipy(): def timeslice_op(x: xr.DataArray) -> xr.DataArray: from muse.timeslices import TIMESLICE, broadcast_timeslice - return (x / (TIMESLICE / broadcast_timeslice(TIMESLICE.sum()))).max("timeslice") + return (x / (TIMESLICE / broadcast_timeslice(TIMESLICE.sum(), level=""))).max( + "timeslice" + ) diff --git a/src/muse/objectives.py b/src/muse/objectives.py index 5e51d7ef0..bdbce691c 100644 --- a/src/muse/objectives.py +++ b/src/muse/objectives.py @@ -130,6 +130,7 @@ def objectives( technologies: xr.Dataset, demand: xr.DataArray, prices: xr.DataArray, + timeslice_level: str | 
None = None, *args, **kwargs, ) -> xr.Dataset: @@ -138,10 +139,15 @@ def objectives( result = xr.Dataset() for name, objective in functions: obj = objective( - technologies=technologies, demand=demand, prices=prices, *args, **kwargs + technologies=technologies, + demand=demand, + prices=prices, + timeslice_level=timeslice_level, + *args, + **kwargs, ) if "timeslice" not in obj.dims: - obj = broadcast_timeslice(obj) + obj = broadcast_timeslice(obj, level=timeslice_level) if "timeslice" in result.dims: obj = drop_timeslice(obj) result[name] = obj @@ -381,6 +387,7 @@ def lifetime_levelized_cost_of_energy( technologies: xr.Dataset, demand: xr.DataArray, prices: xr.DataArray, + timeslice_level: str | None = None, *args, **kwargs, ): @@ -395,11 +402,13 @@ def lifetime_levelized_cost_of_energy( from muse.quantities import capacity_to_service_demand from muse.timeslices import broadcast_timeslice, distribute_timeslice - capacity = capacity_to_service_demand(technologies=technologies, demand=demand) + capacity = capacity_to_service_demand( + technologies=technologies, demand=demand, timeslice_level=timeslice_level + ) production = ( - broadcast_timeslice(capacity) - * distribute_timeslice(technologies.fixed_outputs) - * broadcast_timeslice(technologies.utilization_factor) + broadcast_timeslice(capacity, level=timeslice_level) + * distribute_timeslice(technologies.fixed_outputs, level=timeslice_level) + * broadcast_timeslice(technologies.utilization_factor, level=timeslice_level) ) results = LCOE( @@ -408,6 +417,7 @@ def lifetime_levelized_cost_of_energy( capacity=capacity, production=production, year=demand.year.item(), + timeslice_level=timeslice_level, ) return results.where(np.isfinite(results)).fillna(0.0) @@ -418,6 +428,7 @@ def net_present_value( technologies: xr.Dataset, demand: xr.DataArray, prices: xr.DataArray, + timeslice_level: str | None = None, *args, **kwargs, ): @@ -431,9 +442,9 @@ def net_present_value( capacity = 
capacity_to_service_demand(technologies=technologies, demand=demand) production = ( - broadcast_timeslice(capacity) - * distribute_timeslice(technologies.fixed_outputs) - * broadcast_timeslice(technologies.utilization_factor) + broadcast_timeslice(capacity, level=timeslice_level) + * distribute_timeslice(technologies.fixed_outputs, level=timeslice_level) + * broadcast_timeslice(technologies.utilization_factor, level=timeslice_level) ) results = NPV( @@ -442,6 +453,7 @@ def net_present_value( capacity=capacity, production=production, year=demand.year.item(), + timeslice_level=timeslice_level, ) return results @@ -451,6 +463,7 @@ def net_present_cost( technologies: xr.Dataset, demand: xr.DataArray, prices: xr.DataArray, + timeslice_level: str | None = None, *args, **kwargs, ): @@ -464,9 +477,9 @@ def net_present_cost( capacity = capacity_to_service_demand(technologies=technologies, demand=demand) production = ( - broadcast_timeslice(capacity) - * distribute_timeslice(technologies.fixed_outputs) - * broadcast_timeslice(technologies.utilization_factor) + broadcast_timeslice(capacity, level=timeslice_level) + * distribute_timeslice(technologies.fixed_outputs, level=timeslice_level) + * broadcast_timeslice(technologies.utilization_factor, level=timeslice_level) ) results = NPC( @@ -484,6 +497,7 @@ def equivalent_annual_cost( technologies: xr.Dataset, demand: xr.DataArray, prices: xr.DataArray, + timeslice_level: str | None = None, *args, **kwargs, ): @@ -497,9 +511,9 @@ def equivalent_annual_cost( capacity = capacity_to_service_demand(technologies=technologies, demand=demand) production = ( - broadcast_timeslice(capacity) - * distribute_timeslice(technologies.fixed_outputs) - * broadcast_timeslice(technologies.utilization_factor) + broadcast_timeslice(capacity, level=timeslice_level) + * distribute_timeslice(technologies.fixed_outputs, level=timeslice_level) + * broadcast_timeslice(technologies.utilization_factor, level=timeslice_level) ) results = EAC( @@ -508,5 +522,6 
@@ def equivalent_annual_cost( capacity=capacity, production=production, year=demand.year.item(), + timeslice_level=timeslice_level, ) return results diff --git a/src/muse/production.py b/src/muse/production.py index b23d4666d..5d2fbe651 100644 --- a/src/muse/production.py +++ b/src/muse/production.py @@ -99,7 +99,10 @@ def factory( @register_production(name=("max", "maximum")) def maximum_production( - market: xr.Dataset, capacity: xr.DataArray, technologies: xr.Dataset + market: xr.Dataset, + capacity: xr.DataArray, + technologies: xr.Dataset, + timeslice_level: str | None = None, ) -> xr.DataArray: """Production when running at full capacity. @@ -108,12 +111,15 @@ def maximum_production( """ from muse.quantities import maximum_production - return maximum_production(technologies, capacity) + return maximum_production(technologies, capacity, timeslice_level) @register_production(name=("share", "shares")) def supply( - market: xr.Dataset, capacity: xr.DataArray, technologies: xr.Dataset + market: xr.Dataset, + capacity: xr.DataArray, + technologies: xr.Dataset, + timeslice_level: str | None = None, ) -> xr.DataArray: """Service current demand equally from all assets. 
@@ -122,7 +128,9 @@ def supply( """ from muse.quantities import supply - return supply(capacity, market.consumption, technologies) + return supply( + capacity, market.consumption, technologies, timeslice_level=timeslice_level + ) @register_production(name="match") @@ -131,6 +139,7 @@ def demand_matched_production( capacity: xr.DataArray, technologies: xr.Dataset, costs: str = "prices", + timeslice_level: str | None = None, ) -> xr.DataArray: """Production from matching demand via annual lcoe.""" from muse.costs import annual_levelized_cost_of_energy as lcoe @@ -143,7 +152,9 @@ def demand_matched_production( prices = gross_margin(technologies, capacity, market.prices) elif costs == "lcoe": prices = lcoe( - market.prices, cast(xr.Dataset, broadcast_techs(technologies, capacity)) + market.prices, + cast(xr.Dataset, broadcast_techs(technologies, capacity)), + timeslice_level=timeslice_level, ) else: raise ValueError(f"Unknown costs option {costs}") @@ -153,6 +164,7 @@ def demand_matched_production( prices=prices, capacity=capacity, technologies=technologies, + timeslice_level=timeslice_level, ) @@ -164,6 +176,7 @@ def costed_production( costs: Union[xr.DataArray, Callable, str] = "alcoe", with_minimum_service: bool = True, with_emission: bool = True, + timeslice_level: str | None = None, ) -> xr.DataArray: """Computes production from ranked assets. 
@@ -187,7 +200,9 @@ def costed_production( if callable(costs): technodata = cast(xr.Dataset, broadcast_techs(technologies, capacity)) costs = costs( - prices=market.prices.sel(region=technodata.region), technologies=technodata + prices=market.prices.sel(region=technodata.region), + technologies=technodata, + timeslice_level=timeslice_level, ) else: costs = costs @@ -199,6 +214,7 @@ def costed_production( capacity, technologies, with_minimum_service=with_minimum_service, + timeslice_level=timeslice_level, ) # add production of environmental pollutants if with_emission: diff --git a/src/muse/quantities.py b/src/muse/quantities.py index e172099cf..d2a3b82c3 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -20,6 +20,7 @@ def supply( technologies: Union[xr.Dataset, xr.DataArray], interpolation: str = "linear", production_method: Optional[Callable] = None, + timeslice_level: str | None = None, ) -> xr.DataArray: """Production and emission for a given capacity servicing a given demand. 
@@ -49,8 +50,10 @@ def supply( if production_method is None: production_method = maximum_production - maxprod = production_method(technologies, capacity) - minprod = minimum_production(technologies, capacity) + maxprod = production_method(technologies, capacity, timeslice_level=timeslice_level) + minprod = minimum_production( + technologies, capacity, timeslice_level=timeslice_level + ) size = np.array(maxprod.region).size # in presence of trade demand needs to map maxprod dst_region if ( @@ -90,10 +93,14 @@ def supply( expanded_demand = (demand * maxprod / maxprod.sum(demsum)).fillna(0) expanded_maxprod = ( - maxprod * demand / broadcast_timeslice(demand.sum(prodsum)) + maxprod + * demand + / broadcast_timeslice(demand.sum(prodsum), level=timeslice_level) ).fillna(0) expanded_minprod = ( - minprod * demand / broadcast_timeslice(demand.sum(prodsum)) + minprod + * demand + / broadcast_timeslice(demand.sum(prodsum), level=timeslice_level) ).fillna(0) expanded_demand = expanded_demand.reindex_like(maxprod) expanded_minprod = expanded_minprod.reindex_like(maxprod) @@ -105,9 +112,9 @@ def supply( # add production of environmental pollutants env = is_pollutant(technologies.comm_usage) - result[{"commodity": env}] = emission(result, technologies.fixed_outputs).transpose( - *result.dims - ) + result[{"commodity": env}] = emission( + result, technologies.fixed_outputs, timeslice_level=timeslice_level + ).transpose(*result.dims) result[ {"commodity": ~check_usage(technologies.comm_usage, CommodityUsage.PRODUCT)} ] = 0 @@ -115,7 +122,11 @@ def supply( return result -def emission(production: xr.DataArray, fixed_outputs: xr.DataArray): +def emission( + production: xr.DataArray, + fixed_outputs: xr.DataArray, + timeslice_level: str | None = None, +): """Computes emission from current products. Emissions are computed as `sum(product) * fixed_outputs`. 
@@ -140,12 +151,15 @@ def emission(production: xr.DataArray, fixed_outputs: xr.DataArray): envs = is_pollutant(fouts.comm_usage) enduses = is_enduse(fouts.comm_usage) return production.sel(commodity=enduses).sum("commodity") * broadcast_timeslice( - fouts.sel(commodity=envs) + fouts.sel(commodity=envs), level=timeslice_level ) def gross_margin( - technologies: xr.Dataset, capacity: xr.DataArray, prices: xr.Dataset + technologies: xr.Dataset, + capacity: xr.DataArray, + prices: xr.Dataset, + timeslice_level: str | None = None, ) -> xr.DataArray: """The percentage of revenue after direct expenses have been subtracted. @@ -198,13 +212,18 @@ def gross_margin( # Variable costs depend on factors such as labour variable_costs = distribute_timeslice( var_par * ((fixed_outputs.sel(commodity=enduses)).sum("commodity")) ** var_exp, + level=timeslice_level, ) # The individual prices are selected # costs due to consumables, direct inputs - consumption_costs = (prices * distribute_timeslice(fixed_inputs)).sum("commodity") + consumption_costs = ( + prices * distribute_timeslice(fixed_inputs, level=timeslice_level) + ).sum("commodity") # costs due to pollutants - production_costs = prices * distribute_timeslice(fixed_outputs) + production_costs = prices * distribute_timeslice( + fixed_outputs, level=timeslice_level + ) environmental_costs = (production_costs.sel(commodity=environmentals)).sum( "commodity" ) @@ -223,6 +242,7 @@ def decommissioning_demand( technologies: xr.Dataset, capacity: xr.DataArray, year: Optional[Sequence[int]] = None, + timeslice_level: str | None = None, ) -> xr.DataArray: r"""Computes demand from process decommissioning. 
@@ -264,6 +284,7 @@ def decommissioning_demand( return maximum_production( technologies, capacity_decrease, + timeslice_level=timeslice_level, ).clip(min=0) @@ -271,6 +292,7 @@ def consumption( technologies: xr.Dataset, production: xr.DataArray, prices: Optional[xr.DataArray] = None, + timeslice_level: str | None = None, **kwargs, ) -> xr.DataArray: """Commodity consumption when fulfilling the whole production. @@ -293,7 +315,7 @@ def consumption( production = production.sel(commodity=is_enduse(comm_usage)).sum("commodity") params_fuels = is_fuel(params.comm_usage) consumption = production * broadcast_timeslice( - params.fixed_inputs.where(params_fuels, 0) + params.fixed_inputs.where(params_fuels, 0), level=timeslice_level ) if prices is None: @@ -315,7 +337,9 @@ def consumption( ] # add consumption from cheapest fuel assert all(flexs.commodity.values == consumption.commodity.values) - flex = flexs.where(minprices == broadcast_timeslice(flexs.commodity), 0) + flex = flexs.where( + minprices == broadcast_timeslice(flexs.commodity, level=timeslice_level), 0 + ) flex = flex / (flex > 0).sum("commodity").clip(min=1) return consumption + flex * production @@ -323,6 +347,7 @@ def consumption( def maximum_production( technologies: xr.Dataset, capacity: xr.DataArray, + timeslice_level: str | None = None, **filters, ): r"""Production for a given capacity. 
@@ -371,9 +396,9 @@ def maximum_production( btechs, **{k: v for k, v in filters.items() if k in btechs.dims} ) result = ( - broadcast_timeslice(capa) - * distribute_timeslice(ftechs.fixed_outputs) - * broadcast_timeslice(ftechs.utilization_factor) + broadcast_timeslice(capa, level=timeslice_level) + * distribute_timeslice(ftechs.fixed_outputs, level=timeslice_level) + * broadcast_timeslice(ftechs.utilization_factor, level=timeslice_level) ) return result.where(is_enduse(result.comm_usage), 0) @@ -410,6 +435,7 @@ def capacity_in_use( production: xr.DataArray, technologies: xr.Dataset, max_dim: Optional[Union[str, tuple[str]]] = "commodity", + timeslice_level: str | None = None, **filters, ): """Capacity-in-use for each asset, given production. @@ -447,7 +473,9 @@ def capacity_in_use( ) factor = 1 / (ftechs.fixed_outputs * ftechs.utilization_factor) - capa_in_use = (prod * broadcast_timeslice(factor)).where(~np.isinf(factor), 0) + capa_in_use = (prod * broadcast_timeslice(factor, level=timeslice_level)).where( + ~np.isinf(factor), 0 + ) capa_in_use = capa_in_use.where( is_enduse(technologies.comm_usage.sel(commodity=capa_in_use.commodity)), 0 @@ -464,6 +492,7 @@ def costed_production( capacity: xr.DataArray, technologies: xr.Dataset, with_minimum_service: bool = True, + timeslice_level: str | None = None, ) -> xr.DataArray: """Computes production from ranked assets. @@ -507,7 +536,9 @@ def group_assets(x: xr.DataArray) -> xr.DataArray: else: if hasattr(technodata, "minimum_service_factor"): production = ( - broadcast_timeslice(technodata.minimum_service_factor) + broadcast_timeslice( + technodata.minimum_service_factor, level=timeslice_level + ) * constraints.maxprod ) else: @@ -542,6 +573,7 @@ def group_assets(x: xr.DataArray) -> xr.DataArray: def minimum_production( technologies: xr.Dataset, capacity: xr.DataArray, + timeslice_level: str | None = None, **filters, ): r"""Minimum production for a given capacity. 
@@ -585,7 +617,7 @@ def minimum_production( ) if "minimum_service_factor" not in technologies: - return broadcast_timeslice(xr.zeros_like(capa)) + return broadcast_timeslice(xr.zeros_like(capa), level=timeslice_level) btechs = broadcast_techs( # type: ignore cast( @@ -598,9 +630,9 @@ def minimum_production( btechs, **{k: v for k, v in filters.items() if k in btechs.dims} ) result = ( - broadcast_timeslice(capa) - * distribute_timeslice(ftechs.fixed_outputs) - * broadcast_timeslice(ftechs.minimum_service_factor) + broadcast_timeslice(capa, level=timeslice_level) + * distribute_timeslice(ftechs.fixed_outputs, level=timeslice_level) + * broadcast_timeslice(ftechs.minimum_service_factor, level=timeslice_level) ) return result.where(is_enduse(result.comm_usage), 0) @@ -608,12 +640,14 @@ def minimum_production( def capacity_to_service_demand( demand: xr.DataArray, technologies: xr.Dataset, + timeslice_level: str | None = None, ) -> xr.DataArray: """Minimum capacity required to fulfill the demand.""" from muse.timeslices import broadcast_timeslice, distribute_timeslice timeslice_outputs = distribute_timeslice( - technologies.fixed_outputs.sel(commodity=demand.commodity) - ) * broadcast_timeslice(technologies.utilization_factor) + technologies.fixed_outputs.sel(commodity=demand.commodity), + level=timeslice_level, + ) * broadcast_timeslice(technologies.utilization_factor, level=timeslice_level) capa_to_service_demand = demand / timeslice_outputs return capa_to_service_demand.max(("commodity", "timeslice")) diff --git a/src/muse/readers/csv.py b/src/muse/readers/csv.py index 5438dfa4b..a768bccd6 100644 --- a/src/muse/readers/csv.py +++ b/src/muse/readers/csv.py @@ -618,8 +618,8 @@ def read_initial_market( getLogger(__name__).info("Base year import not provided. 
Set to zero.") base_year_import = xr.zeros_like(projections) - base_year_export = distribute_timeslice(base_year_export) - base_year_import = distribute_timeslice(base_year_import) + base_year_export = distribute_timeslice(base_year_export, level="") + base_year_import = distribute_timeslice(base_year_import, level="") base_year_export.name = "exports" base_year_import.name = "imports" diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index 7037eeca4..5fe9272e5 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -109,6 +109,7 @@ def __init__( from muse.production import maximum_production self.name: str = name + self.timeslice_level = "" """Name of the sector.""" self.subsectors: Sequence[Subsector] = list(subsectors) """Subsectors controlled by this object.""" @@ -276,18 +277,25 @@ def market_variables(self, market: xr.Dataset, technologies: xr.Dataset) -> Any: # Calculate supply supply = self.supply_prod( - market=market, capacity=capacity, technologies=technologies + market=market, + capacity=capacity, + technologies=technologies, + timeslice_level=self.timeslice_level, ) # Calculate consumption - consume = consumption(technologies, supply, market.prices) + consume = consumption( + technologies, supply, market.prices, timeslice_level=self.timeslice_level + ) # Calculate commodity prices technodata = cast(xr.Dataset, broadcast_techs(technologies, supply)) costs = supply_cost( supply.where(~is_pollutant(supply.comm_usage), 0), annual_levelized_cost_of_energy( - prices=market.prices.sel(region=supply.region), technologies=technodata + prices=market.prices.sel(region=supply.region), + technologies=technodata, + timeslice_level=self.timeslice_level, ), asset_dim="asset", ) diff --git a/src/muse/sectors/subsector.py b/src/muse/sectors/subsector.py index edf82a191..8795725b6 100644 --- a/src/muse/sectors/subsector.py +++ b/src/muse/sectors/subsector.py @@ -39,6 +39,7 @@ def __init__( self.forecast = forecast self.name = name 
self.expand_market_prices = expand_market_prices + self.timeslice_level = "" """Whether to expand prices to include destination region. If ``True``, the input market prices are expanded of the missing "dst_region" @@ -97,6 +98,7 @@ def aggregate_lp( technologies, current_year=current_year, forecast=self.forecast, + timeslice_level=self.timeslice_level, ) if "dst_region" in demands.dims: diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index d22ab791f..4a4b64658 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -105,12 +105,15 @@ def setup_module(settings: Union[str, Mapping]): TIMESLICE = read_timeslices(settings) -def broadcast_timeslice(x, ts=None): +def broadcast_timeslice(x, ts=None, level=None): from xarray import Coordinates if ts is None: ts = TIMESLICE + if level is None: + pass + # If x already has timeslices, check that it is matches the reference timeslice. if "timeslice" in x.dims: if x.timeslice.reset_coords(drop=True).equals(ts.timeslice): @@ -122,12 +125,12 @@ def broadcast_timeslice(x, ts=None): return extensive -def distribute_timeslice(x, ts=None): +def distribute_timeslice(x, ts=None, level=None): if ts is None: ts = TIMESLICE - extensive = broadcast_timeslice(x, ts) - return extensive * (ts / broadcast_timeslice(ts.sum())) + extensive = broadcast_timeslice(x, ts, level) + return extensive * (ts / broadcast_timeslice(ts.sum(), level=level)) def drop_timeslice(data: DataArray) -> DataArray: From 8b3679742f2132dd27f5350ab6feb34d292bf5e5 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 4 Nov 2024 22:05:35 +0000 Subject: [PATCH 59/92] Read timeslice_level from settings file --- src/muse/agents/agent.py | 5 ++++- src/muse/sectors/sector.py | 4 +++- src/muse/sectors/subsector.py | 6 +++++- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/src/muse/agents/agent.py b/src/muse/agents/agent.py index 453e571cd..2317fa7f0 100644 --- a/src/muse/agents/agent.py +++ b/src/muse/agents/agent.py @@ -23,6 +23,7 @@ def 
__init__( interpolation: str = "linear", category: Optional[str] = None, quantity: Optional[float] = 1, + timeslice_level: str | None = None, ): """Creates a standard MUSE agent. @@ -57,6 +58,7 @@ def __init__( """Attribute to classify different sets of agents.""" self.quantity = quantity """Attribute to classify different agents' share of the population.""" + self.timeslice_level = timeslice_level def filter_input( self, @@ -117,6 +119,7 @@ def __init__( asset_threshold: float = 1e-4, quantity: Optional[float] = 1, spend_limit: int = 0, + timeslice_level: str | None = None, **kwargs, ): """Creates a standard agent. @@ -158,6 +161,7 @@ def __init__( interpolation=interpolation, category=category, quantity=quantity, + timeslice_level=timeslice_level, ) self.year = year @@ -217,7 +221,6 @@ def __init__( """ self.asset_threshold = asset_threshold """Threshold below which assets are not added.""" - self.timeslice_level = "" @property def forecast_year(self): diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index 5fe9272e5..bfcc38f36 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -48,6 +48,7 @@ def factory(cls, name: str, settings: Any) -> Sector: regions=settings.regions, current_year=int(min(settings.time_framework)), name=subsec_name, + timeslice_level=sector_settings.get("timeslice_level", None), ) for subsec_name, subsec_settings in sector_settings.pop("subsectors") ._asdict() @@ -103,13 +104,14 @@ def __init__( interpolation: str = "linear", outputs: Callable | None = None, supply_prod: PRODUCTION_SIGNATURE | None = None, + timeslice_level: str | None = None, ): from muse.interactions import factory as interaction_factory from muse.outputs.sector import factory as ofactory from muse.production import maximum_production self.name: str = name - self.timeslice_level = "" + self.timeslice_level = timeslice_level """Name of the sector.""" self.subsectors: Sequence[Subsector] = list(subsectors) """Subsectors controlled by 
this object.""" diff --git a/src/muse/sectors/subsector.py b/src/muse/sectors/subsector.py index 8795725b6..d70015ca9 100644 --- a/src/muse/sectors/subsector.py +++ b/src/muse/sectors/subsector.py @@ -26,6 +26,7 @@ def __init__( name: str = "subsector", forecast: int = 5, expand_market_prices: bool = False, + timeslice_level: str | None = None, ): from muse import constraints as cs from muse import demand_share as ds @@ -39,7 +40,7 @@ def __init__( self.forecast = forecast self.name = name self.expand_market_prices = expand_market_prices - self.timeslice_level = "" + self.timeslice_level = timeslice_level """Whether to expand prices to include destination region. If ``True``, the input market prices are expanded of the missing "dst_region" @@ -138,6 +139,7 @@ def factory( regions: Sequence[str] | None = None, current_year: int | None = None, name: str = "subsector", + timeslice_level: str | None = None, ) -> Subsector: from muse import constraints as cs from muse import demand_share as ds @@ -162,6 +164,7 @@ def factory( investment=getattr(settings, "lpsolver", "adhoc"), forecast=getattr(settings, "forecast", 5), constraints=getattr(settings, "constraints", ()), + timeslice_level=timeslice_level, ) # technologies can have nans where a commodity # does not apply to a technology at all @@ -215,6 +218,7 @@ def factory( forecast=forecast, name=name, expand_market_prices=expand_market_prices, + timeslice_level=timeslice_level, ) From 0d2a19e1dcf41c343468b451df33546526d60c37 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Tue, 5 Nov 2024 10:55:32 +0000 Subject: [PATCH 60/92] Add basic transforms --- src/muse/investments.py | 2 +- src/muse/readers/csv.py | 4 +- src/muse/timeslices.py | 108 ++++++++++++++++------------------------ 3 files changed, 46 insertions(+), 68 deletions(-) diff --git a/src/muse/investments.py b/src/muse/investments.py index 05ed89771..428769a4e 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -389,6 +389,6 @@ def 
default_to_scipy(): def timeslice_op(x: xr.DataArray) -> xr.DataArray: from muse.timeslices import TIMESLICE, broadcast_timeslice - return (x / (TIMESLICE / broadcast_timeslice(TIMESLICE.sum(), level=""))).max( + return (x / (TIMESLICE / broadcast_timeslice(TIMESLICE.sum(), level=None))).max( "timeslice" ) diff --git a/src/muse/readers/csv.py b/src/muse/readers/csv.py index a768bccd6..607feedce 100644 --- a/src/muse/readers/csv.py +++ b/src/muse/readers/csv.py @@ -618,8 +618,8 @@ def read_initial_market( getLogger(__name__).info("Base year import not provided. Set to zero.") base_year_import = xr.zeros_like(projections) - base_year_export = distribute_timeslice(base_year_export, level="") - base_year_import = distribute_timeslice(base_year_import, level="") + base_year_export = distribute_timeslice(base_year_export, level=None) + base_year_import = distribute_timeslice(base_year_import, level=None) base_year_export.name = "exports" base_year_import.name = "imports" diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 4a4b64658..d581bb84f 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -11,98 +11,73 @@ from collections.abc import Mapping, Sequence from typing import Union -from numpy import ndarray -from pandas import MultiIndex +import pandas as pd from xarray import DataArray TIMESLICE: DataArray = None # type: ignore """Array with the finest timeslice.""" -TRANSFORMS: dict[tuple, ndarray] = None # type: ignore -"""Transforms from each aggregate to the finest timeslice.""" +TRANSFORMS: dict[str, DataArray] = None # type: ignore def read_timeslices( settings: Union[Mapping, str], level_names: Sequence[str] = ("month", "day", "hour"), - name: str = "timeslice", -) -> DataArray: - '''Reads reference timeslice from toml like input. - - Arguments: - settings: A dictionary of nested dictionaries or a string that toml will - interpret as such. The nesting specifies different levels of the timeslice. 
- If a dictionary and it contains "timeslices" key, then the associated value - is used as the root dictionary. Ultimately, the most nested values should be - relative weights for each slice in the timeslice, e.g. the corresponding - number of hours. - level_names: Hints indicating the names of each level. Can also be given a - "level_names" key in ``settings``. - name: name of the reference array - - Return: - A ``DataArray`` with dimension *timeslice* and values representing the relative - weight of each timeslice. - - Example: - >>> from muse.timeslices import read_timeslices - >>> read_timeslices( - ... """ - ... [timeslices] - ... spring.weekday = 5 - ... spring.weekend = 2 - ... autumn.weekday = 5 - ... autumn.weekend = 2 - ... winter.weekday = 5 - ... winter.weekend = 2 - ... summer.weekday = 5 - ... summer.weekend = 2 - ... level_names = ["season", "week"] - ... """ - ... ) # doctest: +SKIP - Size: 32B - array([5, 2, 5, 2, 5, 2, 5, 2]) - Coordinates: - * timeslice (timeslice) object 64B MultiIndex - * season (timeslice) object 64B 'spring' 'spring' ... 'summer' 'summer' - * week (timeslice) object 64B 'weekday' 'weekend' ... 
'weekend' - ''' +) -> pd.DataFrame: from functools import reduce from toml import loads + # Read timeslice settings if isinstance(settings, str): settings = loads(settings) settings = dict(**settings.get("timeslices", settings)) + + # Extract level names if "level_names" in settings: level_names = settings.pop("level_names") - settings.pop("aggregates", {}) - # figures out levels - levels: list[tuple] = [(level,) for level in settings] + # Extract timeslice levels and lengths ts = list(settings.values()) + levels: list[tuple] = [(level,) for level in settings] while all(isinstance(v, Mapping) for v in ts): levels = [(*previous, b) for previous, a in zip(levels, ts) for b in a] ts = reduce(list.__add__, (list(u.values()) for u in ts), []) - nln = min(len(levels[0]), len(level_names)) - level_names = ( - list(level_names[:nln]) + [str(i) for i in range(len(levels[0]))][nln:] - ) - indices = MultiIndex.from_tuples(levels, names=level_names) - - if any( - reduce(set.union, indices.levels[:i], set()).intersection(indices.levels[i]) - for i in range(1, indices.nlevels) - ): - raise ValueError("Names from different levels should not overlap.") - - return DataArray(ts, coords={"timeslice": indices}, dims=name) + # Create DataFrame + df = pd.DataFrame(ts, columns=["value"]) + df["level"] = levels + df[level_names] = pd.DataFrame(df["level"].tolist(), index=df.index) + df = df.drop("level", axis=1).set_index(level_names) + return df def setup_module(settings: Union[str, Mapping]): """Sets up module singletons.""" global TIMESLICE - TIMESLICE = read_timeslices(settings) + global TRANSFORMS + + df = read_timeslices(settings) + + # Global timeslicing scheme + TIMESLICE = DataArray( + df.values.flatten(), coords={"timeslice": df.index}, dims="timeslice" + ) + + # Timeslices aggregated to each level + TRANSFORMS = {} + levels = df.index.names + for i, level in enumerate(levels): + group = levels[: i + 1] + df_grouped = df.groupby(group, sort=False).sum() + if 
isinstance(df_grouped.index, pd.MultiIndex): + coords = {"timeslice": df_grouped.index} + else: + coords = {"timeslice": df_grouped.index.tolist()} + TRANSFORMS[level] = DataArray( + df_grouped.values.flatten(), + coords=coords, + dims="timeslice", + ) def broadcast_timeslice(x, ts=None, level=None): @@ -111,8 +86,8 @@ def broadcast_timeslice(x, ts=None, level=None): if ts is None: ts = TIMESLICE - if level is None: - pass + if level is not None: + ts = TRANSFORMS[level] # If x already has timeslices, check that it is matches the reference timeslice. if "timeslice" in x.dims: @@ -129,6 +104,9 @@ def distribute_timeslice(x, ts=None, level=None): if ts is None: ts = TIMESLICE + if level is not None: + ts = TRANSFORMS[level] + extensive = broadcast_timeslice(x, ts, level) return extensive * (ts / broadcast_timeslice(ts.sum(), level=level)) From 4ff56c2f81488c43e3abbd981fe5afe5c6a578f9 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 13 Nov 2024 15:56:56 +0000 Subject: [PATCH 61/92] Fix some errors from merge --- src/muse/costs.py | 7 ++++++- src/muse/production.py | 4 +++- src/muse/quantities.py | 8 ++++++-- 3 files changed, 15 insertions(+), 4 deletions(-) diff --git a/src/muse/costs.py b/src/muse/costs.py index def46c9df..bef6b9ab2 100644 --- a/src/muse/costs.py +++ b/src/muse/costs.py @@ -282,7 +282,12 @@ def lifetime_levelized_cost_of_energy( fuels = is_fuel(technologies.comm_usage) # Calculate consumption - cons = consumption(technologies=techs, production=production, prices=prices) + cons = consumption( + technologies=techs, + production=production, + prices=prices, + timeslice_level=timeslice_level, + ) # Cost of installed capacity installed_capacity_costs = distribute_timeslice( diff --git a/src/muse/production.py b/src/muse/production.py index e680352f1..2f1e10bc6 100644 --- a/src/muse/production.py +++ b/src/muse/production.py @@ -127,4 +127,6 @@ def supply( """ from muse.quantities import supply - return supply(capacity, market.consumption, technologies) 
+ return supply( + capacity, market.consumption, technologies, timeslice_level=timeslice_level + ) diff --git a/src/muse/quantities.py b/src/muse/quantities.py index 02e864306..74f9c6a8a 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -43,8 +43,12 @@ def supply( from muse.commodities import CommodityUsage, check_usage, is_pollutant from muse.timeslices import broadcast_timeslice - maxprod = maximum_production(technologies, capacity) - minprod = minimum_production(technologies, capacity) + maxprod = maximum_production( + technologies, capacity, timeslice_level=timeslice_level + ) + minprod = minimum_production( + technologies, capacity, timeslice_level=timeslice_level + ) size = np.array(maxprod.region).size # in presence of trade demand needs to map maxprod dst_region if ( From 738e07d3bd7db25d0dc83e208054b3e819c6a85d Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Wed, 13 Nov 2024 18:04:49 +0000 Subject: [PATCH 62/92] Placeholder functions for sector conversion --- src/muse/mca.py | 1 + src/muse/sectors/abstract.py | 1 - src/muse/sectors/sector.py | 20 +++++++++++++++++++- src/muse/timeslices.py | 2 ++ 4 files changed, 22 insertions(+), 2 deletions(-) diff --git a/src/muse/mca.py b/src/muse/mca.py index 850e7673e..37e3c34cf 100644 --- a/src/muse/mca.py +++ b/src/muse/mca.py @@ -374,6 +374,7 @@ def single_year_iteration( sector_market = sector.next( market[["supply", "consumption", "prices"]] # type:ignore ) + # TODO: check sector_market is in global timeslicing scheme sector_market = sector_market.sel(year=market.year) # Calculate net consumption diff --git a/src/muse/sectors/abstract.py b/src/muse/sectors/abstract.py index 231b444c4..4fe560531 100644 --- a/src/muse/sectors/abstract.py +++ b/src/muse/sectors/abstract.py @@ -29,7 +29,6 @@ def factory(cls, name: str, settings: Any) -> AbstractSector: @abstractmethod def next(self, mca_market: Dataset) -> Dataset: """Advance sector by one time period.""" - pass def __repr__(self): return 
f"<{self.name.title()} sector - object at {hex(id(self))}>" diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index bfcc38f36..46df08058 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -191,6 +191,10 @@ def group_assets(x: xr.DataArray) -> xr.DataArray: # Agent interactions self.interactions(list(self.agents)) + # Convert market to sector timeslicing + # TODO: check mca_market is in global timeslicing scheme + mca_market = self.convert_to_sector_timeslicing(mca_market) + # Select appropriate data from the market market = mca_market.sel( commodity=self.technologies.commodity, region=self.technologies.region @@ -261,7 +265,9 @@ def group_assets(x: xr.DataArray) -> xr.DataArray: commodity=result.commodity ) result.set_coords("comm_usage") - return result + + # Convert result to global timeslicing scheme + return self.convert_to_global_timeslicing(result) def save_outputs(self) -> None: """Calls the outputs function with the current output data.""" @@ -304,6 +310,18 @@ def market_variables(self, market: xr.Dataset, technologies: xr.Dataset) -> Any: return supply, consume, costs + def convert_to_sector_timeslicing(self, market: xr.Dataset) -> xr.Dataset: + """Converts market data to sector timeslicing.""" + # sector_market = func(market, self.timeslice_level) + # return sector_market + return market + + def convert_to_global_timeslicing(self, market: xr.Dataset) -> xr.Dataset: + """Converts market data to global timeslicing.""" + # global_market = func(market) + # return global_market + return market + @property def capacity(self) -> xr.DataArray: """Aggregates capacity across agents. 
diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index d581bb84f..935fe6226 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -81,6 +81,7 @@ def setup_module(settings: Union[str, Mapping]): def broadcast_timeslice(x, ts=None, level=None): + """Convert a non-timesliced array to a timesliced array by broadcasting.""" from xarray import Coordinates if ts is None: @@ -101,6 +102,7 @@ def broadcast_timeslice(x, ts=None, level=None): def distribute_timeslice(x, ts=None, level=None): + """Convert a non-timesliced array to a timesliced array by distribution.""" if ts is None: ts = TIMESLICE From ff847bb7d7fc934ea119489089d94704292d4031 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 14 Nov 2024 09:36:07 +0000 Subject: [PATCH 63/92] Functions working with single timeslice level --- src/muse/sectors/sector.py | 25 +++++++++++++++----- src/muse/timeslices.py | 48 +++++++++++++++++++++++++++++++++++++- 2 files changed, 66 insertions(+), 7 deletions(-) diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index 46df08058..1cf09414d 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -312,15 +312,28 @@ def market_variables(self, market: xr.Dataset, technologies: xr.Dataset) -> Any: def convert_to_sector_timeslicing(self, market: xr.Dataset) -> xr.Dataset: """Converts market data to sector timeslicing.""" - # sector_market = func(market, self.timeslice_level) - # return sector_market - return market + from muse.timeslices import compress_timeslice + + supply = compress_timeslice( + market["supply"], level=self.timeslice_level, operation="sum" + ) + consumption = compress_timeslice( + market["consumption"], level=self.timeslice_level, operation=sum + ) + prices = compress_timeslice( + market["prices"], level=self.timeslice_level, operation="mean" + ) + return xr.Dataset(dict(supply=supply, consumption=consumption, prices=prices)) def convert_to_global_timeslicing(self, market: xr.Dataset) -> xr.Dataset: 
"""Converts market data to global timeslicing.""" - # global_market = func(market) - # return global_market - return market + from muse.timeslices import expand_timeslice + + supply = expand_timeslice(market["supply"], operation="distribute") + consumption = expand_timeslice(market["consumption"], operation="distribute") + costs = expand_timeslice(market["costs"], operation="distribute") + + return xr.Dataset(dict(supply=supply, consumption=consumption, costs=costs)) @property def capacity(self) -> xr.DataArray: diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 935fe6226..200f9de37 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -90,7 +90,7 @@ def broadcast_timeslice(x, ts=None, level=None): if level is not None: ts = TRANSFORMS[level] - # If x already has timeslices, check that it is matches the reference timeslice. + # If x already has timeslices, check that it matches the reference timeslice. if "timeslice" in x.dims: if x.timeslice.reset_coords(drop=True).equals(ts.timeslice): return x @@ -113,6 +113,52 @@ def distribute_timeslice(x, ts=None, level=None): return extensive * (ts / broadcast_timeslice(ts.sum(), level=level)) +def compress_timeslice(x, ts=None, level=None, operation="sum"): + """Convert a timesliced array to a lower level by performing the given operation. + + The operation can be either 'sum', or 'item' + - sum: weighted sum according to timeslice length + - mean: weighted mean according to timeslice length + """ + if ts is None: + ts = TIMESLICE + + if not x.timeslice.reset_coords(drop=True).equals(ts.timeslice): + raise ValueError( + "x must be in the global timeslicing scheme to perform this operation." + ) + + finest_level = ts.timeslice.to_index().names[-1] + if level == finest_level: + return x + + # Perform the operation over one timeslice level + coarser_level = ... 
+ + # Recurse + return compress_timeslice(x, ts=ts, level=level, operation=operation) + +def expand_timeslice(x, ts=None, operation="distribute"): + """Convert a timesliced array to the global scheme by expanding. + + The operation can be either 'distribute', or 'broadcast' + - distribute: distribute the values according to timeslice length + - broadcast: broadcast the values across the new timeslice level + """ + if ts is None: + ts = TIMESLICE + + current_level = x.timeslice.to_index().names[-1] + finest_level = ts.timeslice.to_index().names[-1] + if current_level == finest_level: + return x + + # Perform the operation over one timeslice level + finer_level = ... + + # Recurse + return expand_timeslice(x, ts=ts, operation=operation) + def drop_timeslice(data: DataArray) -> DataArray: """Drop the timeslice variable from a DataArray. From cb5b9a19dbb80f48f4b503da7ed57694e8b08a4d Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 14 Nov 2024 10:46:23 +0000 Subject: [PATCH 64/92] Working compress_timeslice function --- src/muse/sectors/sector.py | 2 +- src/muse/timeslices.py | 57 ++++++++++++++++++++++++++++++-------- 2 files changed, 46 insertions(+), 13 deletions(-) diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index 1cf09414d..0eb5a8c12 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -318,7 +318,7 @@ def convert_to_sector_timeslicing(self, market: xr.Dataset) -> xr.Dataset: market["supply"], level=self.timeslice_level, operation="sum" ) consumption = compress_timeslice( - market["consumption"], level=self.timeslice_level, operation=sum + market["consumption"], level=self.timeslice_level, operation="sum" ) prices = compress_timeslice( market["prices"], level=self.timeslice_level, operation="mean" diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 200f9de37..e4e984bb7 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -116,28 +116,46 @@ def distribute_timeslice(x, ts=None, 
level=None): def compress_timeslice(x, ts=None, level=None, operation="sum"): """Convert a timesliced array to a lower level by performing the given operation. - The operation can be either 'sum', or 'item' - - sum: weighted sum according to timeslice length - - mean: weighted mean according to timeslice length + The operation can be either 'sum', or 'mean' """ if ts is None: ts = TIMESLICE - if not x.timeslice.reset_coords(drop=True).equals(ts.timeslice): - raise ValueError( - "x must be in the global timeslicing scheme to perform this operation." - ) + # If level is not specified, don't compress + if level is None: + return x - finest_level = ts.timeslice.to_index().names[-1] - if level == finest_level: + # Get level names from x + level_names = x.timeslice.to_index().names + if level not in level_names: + raise ValueError(f"Unknown level: {level}. Must be one of {level_names}.") + current_level, coarser_levels = level_names[-1], level_names[:-1] + + # Return if already at the desired level + if current_level == level: return x # Perform the operation over one timeslice level - coarser_level = ... + if operation == "sum": + x = ( + x.unstack(dim="timeslice") + .sum(current_level) + .stack(timeslice=coarser_levels) + ) + elif operation == "mean": + # TODO: This should be a weighted mean according to timeslice length + x = ( + x.unstack(dim="timeslice") + .mean(current_level) + .stack(timeslice=coarser_levels) + ) + else: + raise ValueError(f"Unknown operation: {operation}. Must be 'sum' or 'mean'.") # Recurse return compress_timeslice(x, ts=ts, level=level, operation=operation) + def expand_timeslice(x, ts=None, operation="distribute"): """Convert a timesliced array to the global scheme by expanding. 
@@ -148,17 +166,32 @@ def expand_timeslice(x, ts=None, operation="distribute"): if ts is None: ts = TIMESLICE + # Get level names from ts + level_names = ts.timeslice.to_index().names + finest_level = level_names[-1] + + # Return if already at the finest level current_level = x.timeslice.to_index().names[-1] - finest_level = ts.timeslice.to_index().names[-1] if current_level == finest_level: return x + else: + pass # Perform the operation over one timeslice level - finer_level = ... + finer_level = level_names[level_names.index(current_level) + 1] + if operation == "broadcast": + return x # TODO + elif operation == "distribute": + return x # TODO + else: + raise ValueError( + f"Unknown operation: {operation}. Must be 'distribute' or 'broadcast'." + ) # Recurse return expand_timeslice(x, ts=ts, operation=operation) + def drop_timeslice(data: DataArray) -> DataArray: """Drop the timeslice variable from a DataArray. From 62ab9aca2cd7c3a7eef7d640b7d34d09ae93f2e5 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 14 Nov 2024 11:19:10 +0000 Subject: [PATCH 65/92] Typing, remove aggregates, skeleton for tests --- src/muse/data/default_settings.toml | 5 ----- src/muse/timeslices.py | 17 +++++++++++++---- tests/conftest.py | 7 ------- 3 files changed, 13 insertions(+), 16 deletions(-) diff --git a/src/muse/data/default_settings.toml b/src/muse/data/default_settings.toml index 9081520fb..35bc1113c 100644 --- a/src/muse/data/default_settings.toml +++ b/src/muse/data/default_settings.toml @@ -70,8 +70,3 @@ summer.weekend.night = 150 summer.weekend.morning = 150 summer.weekend.afternoon = 150 summer.weekend.evening = 150 - -[timeslices.aggregates] -all-day = ["night", "morning", "afternoon", "early-peak", "late-peak", "evening"] -all-week = ["weekday", "weekend"] -all-year = ["winter", "summer", "spring-autumn"] diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index e4e984bb7..1c0f68360 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -80,7 +80,9 
@@ def setup_module(settings: Union[str, Mapping]): ) -def broadcast_timeslice(x, ts=None, level=None): +def broadcast_timeslice( + x: DataArray, ts: DataArray | None = None, level: str | None = None +): """Convert a non-timesliced array to a timesliced array by broadcasting.""" from xarray import Coordinates @@ -101,7 +103,7 @@ def broadcast_timeslice(x, ts=None, level=None): return extensive -def distribute_timeslice(x, ts=None, level=None): +def distribute_timeslice(x: DataArray, ts: DataArray | None = None, level=None): """Convert a non-timesliced array to a timesliced array by distribution.""" if ts is None: ts = TIMESLICE @@ -113,7 +115,12 @@ def distribute_timeslice(x, ts=None, level=None): return extensive * (ts / broadcast_timeslice(ts.sum(), level=level)) -def compress_timeslice(x, ts=None, level=None, operation="sum"): +def compress_timeslice( + x: DataArray, + ts: DataArray | None = None, + level: str | None = None, + operation: str = "sum", +): """Convert a timesliced array to a lower level by performing the given operation. The operation can be either 'sum', or 'mean' @@ -156,7 +163,9 @@ def compress_timeslice(x, ts=None, level=None, operation="sum"): return compress_timeslice(x, ts=ts, level=level, operation=operation) -def expand_timeslice(x, ts=None, operation="distribute"): +def expand_timeslice( + x: DataArray, ts: DataArray | None = None, operation: str = "distribute" +): """Convert a timesliced array to the global scheme by expanding. 
The operation can be either 'distribute', or 'broadcast' diff --git a/tests/conftest.py b/tests/conftest.py index f7d52adb8..e9e66c23c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -147,13 +147,6 @@ def default_timeslice_globals(): summer.weekend.afternoon = 150 summer.weekend.evening = 150 level_names = ["month", "day", "hour"] - - [timeslices.aggregates] - all-day = [ - "night", "morning", "afternoon", "early-peak", "late-peak", "evening", "night" - ] - all-week = ["weekday", "weekend"] - all-year = ["winter", "summer", "spring-autumn"] """ setup_module(default_timeslices) From b51143e39d228e88e5e8f5067f33c2c240ea0cae Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 14 Nov 2024 11:19:32 +0000 Subject: [PATCH 66/92] Skeleton for tests --- tests/test_timeslices.py | 86 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 84 insertions(+), 2 deletions(-) diff --git a/tests/test_timeslices.py b/tests/test_timeslices.py index d1dd4e72f..996fcbe4f 100644 --- a/tests/test_timeslices.py +++ b/tests/test_timeslices.py @@ -19,8 +19,6 @@ def toml(): summer.weekend.night = 2 summer.weekend.dusk = 1 level_names = ["semester", "week", "day"] - [timeslices.aggregates] - allday = ["day", "night"] """ @@ -116,3 +114,87 @@ def test_drop_timeslice(timeslice_dataarray): data_without_timeslice = DataArray([1, 2, 3], dims=["x"]) assert drop_timeslice(data_without_timeslice).equals(data_without_timeslice) assert drop_timeslice(dropped).equals(dropped) + + +@fixture +def non_timesliced_dataarray(): + return DataArray([1, 2, 3], dims=["x"]) + + +def test_broadcast_timeslice(non_timesliced_dataarray, timeslice): + from muse.timeslices import broadcast_timeslice + + # Test 1: normal call + out = broadcast_timeslice(non_timesliced_dataarray) + # Assert timeslicing in output matches the global scheme + assert out.timeslice.equals(TIMESLICE.timeslice) + # Assert all values are equal to each other + + # Assert all values in the output are equal to the input + + # Test 2: 
calling on a compatible timesliced array + # Assert the input is returned unchanged + + # Test 3: calling on an incompatible timesliced array + # Assert ValueError is raised + + pass + + +def test_distribute_timeslice(non_timesliced_dataarray): + from muse.timeslices import distribute_timeslice, TIMESLICE + + # Test 1: normal call + # Assert timeslicing in output matches the global scheme + # Assert all values are in proportion to timeslice length + # Assert sum of output across timeslices is equal to the input + + # Test 2: calling on a compatible timesliced array + # Assert the input is returned unchanged + + # Test 3: calling on an incompatible timesliced array + # Assert ValueError is raised + + pass + + +def test_compress_timeslice(non_timesliced_dataarray): + from muse.timeslices import compress_timeslice + + # Test 1: without specifying level + # Assert output matches input + + # Test 2: invalid operation + # Assert ValueError is raised + + # Test 3: sum operation + # Assert timeslicing is the correct level + # Assert sum of output equals sum of input + + # Test 4: mean operation + # Assert timeslicing is the correct level + # Assert weighted mean of output equals weighted mean of input + + pass + + +def test_expand_timeslice(timeslice_dataarray): + from muse.timeslices import expand_timeslice + + # Test 1: calling on an already expanded array + # Assert the input is returned unchanged + + # Test 2: invalid operation + # Assert ValueError is raised + + # Test 3: broadcast operation + # Assert timeslicing matches the global scheme + # Assert all values are equal to each other + # Assert all values in the output are equal to the input + + # Test 4: distribute operation + # Assert timeslicing matches the global scheme + # Assert all values are in proportion to timeslice length + # Assert sum of output across timeslices is equal to the input + + pass From 1f35e8d597ffedbb6b47a5dfb0114a4b3e8e162d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 14 Nov 2024 11:21:17 +0000 Subject: [PATCH 67/92] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- tests/test_timeslices.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/test_timeslices.py b/tests/test_timeslices.py index 996fcbe4f..7cb5bb0a9 100644 --- a/tests/test_timeslices.py +++ b/tests/test_timeslices.py @@ -142,7 +142,6 @@ def test_broadcast_timeslice(non_timesliced_dataarray, timeslice): def test_distribute_timeslice(non_timesliced_dataarray): - from muse.timeslices import distribute_timeslice, TIMESLICE # Test 1: normal call # Assert timeslicing in output matches the global scheme @@ -159,7 +158,6 @@ def test_distribute_timeslice(non_timesliced_dataarray): def test_compress_timeslice(non_timesliced_dataarray): - from muse.timeslices import compress_timeslice # Test 1: without specifying level # Assert output matches input @@ -179,7 +177,6 @@ def test_compress_timeslice(non_timesliced_dataarray): def test_expand_timeslice(timeslice_dataarray): - from muse.timeslices import expand_timeslice # Test 1: calling on an already expanded array # Assert the input is returned unchanged From 8c772d7815d5404bef121f84df19810c2703f68c Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 14 Nov 2024 11:53:10 +0000 Subject: [PATCH 68/92] Restore patch to tests --- tests/conftest.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index e9e66c23c..8a79fefef 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,12 +1,14 @@ from collections.abc import Mapping, Sequence from pathlib import Path from typing import Callable, Optional +from unittest.mock import patch import numpy as np from pandas import DataFrame from pytest import fixture from xarray import DataArray, Dataset +from muse.__main__ import patched_broadcast_compat_data from muse.agents import Agent @@ -19,6 +21,14 @@ def 
logger(): return logger +@fixture(autouse=True) +def patch_broadcast_compat_data(): + with patch( + "xarray.core.variable._broadcast_compat_data", patched_broadcast_compat_data + ): + yield + + @fixture() def sectors_dir(tmpdir): """Copies sectors directory to new dir. From 597a203729fc601833ab23c024db7aee10059423 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 14 Nov 2024 11:55:09 +0000 Subject: [PATCH 69/92] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- tests/test_timeslices.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/test_timeslices.py b/tests/test_timeslices.py index 7cb5bb0a9..b912acc76 100644 --- a/tests/test_timeslices.py +++ b/tests/test_timeslices.py @@ -142,7 +142,6 @@ def test_broadcast_timeslice(non_timesliced_dataarray, timeslice): def test_distribute_timeslice(non_timesliced_dataarray): - # Test 1: normal call # Assert timeslicing in output matches the global scheme # Assert all values are in proportion to timeslice length @@ -158,7 +157,6 @@ def test_distribute_timeslice(non_timesliced_dataarray): def test_compress_timeslice(non_timesliced_dataarray): - # Test 1: without specifying level # Assert output matches input @@ -177,7 +175,6 @@ def test_compress_timeslice(non_timesliced_dataarray): def test_expand_timeslice(timeslice_dataarray): - # Test 1: calling on an already expanded array # Assert the input is returned unchanged From 08234e23bd75ff51f1218b9290fbc87f24cd8e1f Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 14 Nov 2024 13:49:49 +0000 Subject: [PATCH 70/92] Tests for broadcast and distribute, replace transforms with compress --- src/muse/timeslices.py | 60 ++++++++++++++++---------------------- tests/test_timeslices.py | 62 +++++++++++++++++++++++++--------------- 2 files changed, 64 insertions(+), 58 deletions(-) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 
1c0f68360..d9ca98e58 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -15,8 +15,6 @@ from xarray import DataArray TIMESLICE: DataArray = None # type: ignore -"""Array with the finest timeslice.""" -TRANSFORMS: dict[str, DataArray] = None # type: ignore def read_timeslices( @@ -43,41 +41,33 @@ def read_timeslices( levels = [(*previous, b) for previous, a in zip(levels, ts) for b in a] ts = reduce(list.__add__, (list(u.values()) for u in ts), []) - # Create DataFrame - df = pd.DataFrame(ts, columns=["value"]) - df["level"] = levels - df[level_names] = pd.DataFrame(df["level"].tolist(), index=df.index) - df = df.drop("level", axis=1).set_index(level_names) - return df + nln = min(len(levels[0]), len(level_names)) + level_names = ( + list(level_names[:nln]) + [str(i) for i in range(len(levels[0]))][nln:] + ) + indices = pd.MultiIndex.from_tuples(levels, names=level_names) + + if any( + reduce(set.union, indices.levels[:i], set()).intersection(indices.levels[i]) + for i in range(1, indices.nlevels) + ): + raise ValueError("Names from different levels should not overlap.") + + return DataArray(ts, coords={"timeslice": indices}, dims="timeslice") + + # # Create DataFrame + # df = pd.DataFrame(ts, columns=["value"]) + # df["level"] = levels + # df[level_names] = pd.DataFrame(df["level"].tolist(), index=df.index) + # df = df.drop("level", axis=1).set_index(level_names) + # return df def setup_module(settings: Union[str, Mapping]): """Sets up module singletons.""" global TIMESLICE - global TRANSFORMS - df = read_timeslices(settings) - - # Global timeslicing scheme - TIMESLICE = DataArray( - df.values.flatten(), coords={"timeslice": df.index}, dims="timeslice" - ) - - # Timeslices aggregated to each level - TRANSFORMS = {} - levels = df.index.names - for i, level in enumerate(levels): - group = levels[: i + 1] - df_grouped = df.groupby(group, sort=False).sum() - if isinstance(df_grouped.index, pd.MultiIndex): - coords = {"timeslice": df_grouped.index} - 
else: - coords = {"timeslice": df_grouped.index.tolist()} - TRANSFORMS[level] = DataArray( - df_grouped.values.flatten(), - coords=coords, - dims="timeslice", - ) + TIMESLICE = read_timeslices(settings) def broadcast_timeslice( @@ -90,7 +80,7 @@ def broadcast_timeslice( ts = TIMESLICE if level is not None: - ts = TRANSFORMS[level] + ts = compress_timeslice(ts, level=level, operation="sum") # If x already has timeslices, check that it matches the reference timeslice. if "timeslice" in x.dims: @@ -109,10 +99,10 @@ def distribute_timeslice(x: DataArray, ts: DataArray | None = None, level=None): ts = TIMESLICE if level is not None: - ts = TRANSFORMS[level] + ts = compress_timeslice(ts, level=level, operation="sum") - extensive = broadcast_timeslice(x, ts, level) - return extensive * (ts / broadcast_timeslice(ts.sum(), level=level)) + extensive = broadcast_timeslice(x, ts) + return extensive * (ts / broadcast_timeslice(ts.sum(), ts)) def compress_timeslice( diff --git a/tests/test_timeslices.py b/tests/test_timeslices.py index b912acc76..c3479e442 100644 --- a/tests/test_timeslices.py +++ b/tests/test_timeslices.py @@ -1,6 +1,6 @@ """Test timeslice utilities.""" -from pytest import fixture +from pytest import approx, fixture, raises from xarray import DataArray @@ -121,42 +121,58 @@ def non_timesliced_dataarray(): return DataArray([1, 2, 3], dims=["x"]) -def test_broadcast_timeslice(non_timesliced_dataarray, timeslice): +def test_broadcast_timeslice(non_timesliced_dataarray, timeslice, timeslice_dataarray): from muse.timeslices import broadcast_timeslice - # Test 1: normal call out = broadcast_timeslice(non_timesliced_dataarray) - # Assert timeslicing in output matches the global scheme - assert out.timeslice.equals(TIMESLICE.timeslice) - # Assert all values are equal to each other - # Assert all values in the output are equal to the input + # Check that timeslicing in output matches the global scheme + assert out.timeslice.equals(timeslice.timeslice) - # Test 2: 
calling on a compatible timesliced array - # Assert the input is returned unchanged + # Check that all timeslices in the output are equal to each other + assert (out.diff(dim="timeslice") == 0).all() - # Test 3: calling on an incompatible timesliced array - # Assert ValueError is raised + # Check that all values in the output are equal to the input + assert all( + (out.isel(timeslice=i) == non_timesliced_dataarray).all() + for i in range(out.sizes["timeslice"]) + ) - pass + # Calling on an already timesliced array: the input should be returned unchanged + out2 = broadcast_timeslice(out) + assert out2.equals(out) + # Calling with an incompatible timeslicing scheme: ValueError should be raised + with raises(ValueError): + broadcast_timeslice(out, ts=timeslice_dataarray) -def test_distribute_timeslice(non_timesliced_dataarray): - # Test 1: normal call - # Assert timeslicing in output matches the global scheme - # Assert all values are in proportion to timeslice length - # Assert sum of output across timeslices is equal to the input - # Test 2: calling on a compatible timesliced array - # Assert the input is returned unchanged +def test_distribute_timeslice(non_timesliced_dataarray, timeslice, timeslice_dataarray): + from muse.timeslices import broadcast_timeslice, distribute_timeslice - # Test 3: calling on an incompatible timesliced array - # Assert ValueError is raised + out = distribute_timeslice(non_timesliced_dataarray) - pass + # Check that timeslicing in output matches the global scheme + assert out.timeslice.equals(timeslice.timeslice) + + # Check that all values are proportional to timeslice lengths + out_proportions = out / broadcast_timeslice(out.sum("timeslice")) + ts_proportions = timeslice / broadcast_timeslice(timeslice.sum("timeslice")) + assert abs(out_proportions - ts_proportions).max() < 1e-6 + + # Check that the sum across timeslices is equal to the input + assert (out.sum("timeslice") == approx(non_timesliced_dataarray)).all() + + # Calling on an 
already timesliced array: the input should be returned unchanged + out2 = distribute_timeslice(out) + assert out2.equals(out) + + # Calling with an incompatible timeslicing scheme: ValueError should be raised + with raises(ValueError): + distribute_timeslice(out, ts=timeslice_dataarray) -def test_compress_timeslice(non_timesliced_dataarray): +def test_compress_timeslice(non_timesliced_dataarray, timeslice, timeslice_dataarray): # Test 1: without specifying level # Assert output matches input From 78f50f4ed016b4c8deaba000539aa3d62224f2ae Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 14 Nov 2024 15:20:49 +0000 Subject: [PATCH 71/92] Test for compress_timeslice --- src/muse/investments.py | 2 ++ src/muse/timeslices.py | 6 +++++ tests/test_timeslices.py | 57 ++++++++++++++++++++++++++-------------- 3 files changed, 45 insertions(+), 20 deletions(-) diff --git a/src/muse/investments.py b/src/muse/investments.py index 2c47ac4a7..d6501da34 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -224,6 +224,7 @@ def adhoc_match_demand( technologies: xr.Dataset, constraints: list[Constraint], year: int, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: from muse.demand_matching import demand_matching from muse.quantities import capacity_in_use, maximum_production @@ -237,6 +238,7 @@ def adhoc_match_demand( year=year, technology=costs.replacement, commodity=demand.commodity, + timeslice_level=timeslice_level, ).drop_vars("technology") # Push disabled techs to last rank. diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index d9ca98e58..4b198a105 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -101,6 +101,12 @@ def distribute_timeslice(x: DataArray, ts: DataArray | None = None, level=None): if level is not None: ts = compress_timeslice(ts, level=level, operation="sum") + # If x already has timeslices, check that it matches the reference timeslice. 
+ if "timeslice" in x.dims: + if x.timeslice.reset_coords(drop=True).equals(ts.timeslice): + return x + raise ValueError("x has incompatible timeslicing.") + extensive = broadcast_timeslice(x, ts) return extensive * (ts / broadcast_timeslice(ts.sum(), ts)) diff --git a/tests/test_timeslices.py b/tests/test_timeslices.py index c3479e442..3db99f185 100644 --- a/tests/test_timeslices.py +++ b/tests/test_timeslices.py @@ -121,8 +121,8 @@ def non_timesliced_dataarray(): return DataArray([1, 2, 3], dims=["x"]) -def test_broadcast_timeslice(non_timesliced_dataarray, timeslice, timeslice_dataarray): - from muse.timeslices import broadcast_timeslice +def test_broadcast_timeslice(non_timesliced_dataarray, timeslice): + from muse.timeslices import broadcast_timeslice, compress_timeslice out = broadcast_timeslice(non_timesliced_dataarray) @@ -142,13 +142,17 @@ def test_broadcast_timeslice(non_timesliced_dataarray, timeslice, timeslice_data out2 = broadcast_timeslice(out) assert out2.equals(out) - # Calling with an incompatible timeslicing scheme: ValueError should be raised + # Calling on an array with inappropraite timeslicing: ValueError should be raised with raises(ValueError): - broadcast_timeslice(out, ts=timeslice_dataarray) + broadcast_timeslice(compress_timeslice(timeslice, level="day")) -def test_distribute_timeslice(non_timesliced_dataarray, timeslice, timeslice_dataarray): - from muse.timeslices import broadcast_timeslice, distribute_timeslice +def test_distribute_timeslice(non_timesliced_dataarray, timeslice): + from muse.timeslices import ( + broadcast_timeslice, + compress_timeslice, + distribute_timeslice, + ) out = distribute_timeslice(non_timesliced_dataarray) @@ -167,27 +171,40 @@ def test_distribute_timeslice(non_timesliced_dataarray, timeslice, timeslice_dat out2 = distribute_timeslice(out) assert out2.equals(out) - # Calling with an incompatible timeslicing scheme: ValueError should be raised + # Calling on an array with inappropraite timeslicing: 
ValueError should be raised with raises(ValueError): - distribute_timeslice(out, ts=timeslice_dataarray) + distribute_timeslice(compress_timeslice(timeslice, level="day")) -def test_compress_timeslice(non_timesliced_dataarray, timeslice, timeslice_dataarray): - # Test 1: without specifying level - # Assert output matches input +def test_compress_timeslice(non_timesliced_dataarray, timeslice): + from muse.timeslices import broadcast_timeslice, compress_timeslice - # Test 2: invalid operation - # Assert ValueError is raised + # Create timesliced dataarray for testing + timesliced_dataarray = broadcast_timeslice(non_timesliced_dataarray) - # Test 3: sum operation - # Assert timeslicing is the correct level - # Assert sum of output equals sum of input + # Sum operation + out = compress_timeslice(timesliced_dataarray, operation="sum", level="day") + assert out.timeslice.to_index().names[-1] == "day" + assert (out.sum("timeslice") == approx(timesliced_dataarray.sum("timeslice"))).all() - # Test 4: mean operation - # Assert timeslicing is the correct level - # Assert weighted mean of output equals weighted mean of input + # Mean operation + out = compress_timeslice(timesliced_dataarray, operation="mean", level="day") + assert out.timeslice.to_index().names[-1] == "day" + assert ( + out.mean("timeslice") == approx(timesliced_dataarray.mean("timeslice")) + ).all() - pass + # Calling without specifying a level: the input should be returned unchanged + out = compress_timeslice(timesliced_dataarray) + assert out.equals(timesliced_dataarray) + + # Calling with an invalid level: ValueError should be raised + with raises(ValueError): + compress_timeslice(timesliced_dataarray, level="invalid") + + # Calling with an invalid operation: ValueError should be raised + with raises(ValueError): + compress_timeslice(timesliced_dataarray, level="day", operation="invalid") def test_expand_timeslice(timeslice_dataarray): From ef8ecf2b6b169cc145343cc6e3994d9d9cdea84c Mon Sep 17 00:00:00 2001 
From: Tom Bland Date: Thu, 14 Nov 2024 17:20:41 +0000 Subject: [PATCH 72/92] Finish functions and tests --- src/muse/timeslices.py | 23 +++++++------ tests/test_timeslices.py | 70 +++++++++++++++++++++++++--------------- 2 files changed, 58 insertions(+), 35 deletions(-) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 4b198a105..bcc4ac6e3 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -11,6 +11,7 @@ from collections.abc import Mapping, Sequence from typing import Union +import numpy as np import pandas as pd from xarray import DataArray @@ -145,6 +146,7 @@ def compress_timeslice( .sum(current_level) .stack(timeslice=coarser_levels) ) + # return x.unstack(dim="timeslice").sum(["hour"]).stack(timeslice=["month", "day"]) elif operation == "mean": # TODO: This should be a weighted mean according to timeslice length x = ( @@ -173,28 +175,31 @@ def expand_timeslice( # Get level names from ts level_names = ts.timeslice.to_index().names - finest_level = level_names[-1] # Return if already at the finest level + finest_level = level_names[-1] current_level = x.timeslice.to_index().names[-1] if current_level == finest_level: return x - else: - pass - # Perform the operation over one timeslice level - finer_level = level_names[level_names.index(current_level) + 1] + # Prepare mask + mask = ts.unstack(dim="timeslice") if operation == "broadcast": - return x # TODO + mask = mask.where(np.isnan(mask), 1) elif operation == "distribute": - return x # TODO + mask /= mask.sum(level_names[level_names.index(current_level) + 1 :]) else: raise ValueError( f"Unknown operation: {operation}. Must be 'distribute' or 'broadcast'." 
) - # Recurse - return expand_timeslice(x, ts=ts, operation=operation) + # Perform the operation + return ( + (x.unstack(dim="timeslice") * mask) + .stack(timeslice=level_names) + .dropna("timeslice") + .sel(timeslice=ts.timeslice) + ) def drop_timeslice(data: DataArray) -> DataArray: diff --git a/tests/test_timeslices.py b/tests/test_timeslices.py index 3db99f185..ccb0722c0 100644 --- a/tests/test_timeslices.py +++ b/tests/test_timeslices.py @@ -179,20 +179,23 @@ def test_distribute_timeslice(non_timesliced_dataarray, timeslice): def test_compress_timeslice(non_timesliced_dataarray, timeslice): from muse.timeslices import broadcast_timeslice, compress_timeslice - # Create timesliced dataarray for testing + # Create timesliced dataarray for compressing timesliced_dataarray = broadcast_timeslice(non_timesliced_dataarray) - # Sum operation - out = compress_timeslice(timesliced_dataarray, operation="sum", level="day") - assert out.timeslice.to_index().names[-1] == "day" - assert (out.sum("timeslice") == approx(timesliced_dataarray.sum("timeslice"))).all() - - # Mean operation - out = compress_timeslice(timesliced_dataarray, operation="mean", level="day") - assert out.timeslice.to_index().names[-1] == "day" - assert ( - out.mean("timeslice") == approx(timesliced_dataarray.mean("timeslice")) - ).all() + for level in ["month", "day", "hour"]: + # Sum operation + out = compress_timeslice(timesliced_dataarray, operation="sum", level=level) + assert out.timeslice.to_index().names[-1] == level + assert ( + out.sum("timeslice") == approx(timesliced_dataarray.sum("timeslice")) + ).all() + + # Mean operation + out = compress_timeslice(timesliced_dataarray, operation="mean", level=level) + assert out.timeslice.to_index().names[-1] == level + assert ( + out.mean("timeslice") == approx(timesliced_dataarray.mean("timeslice")) + ).all() # Calling without specifying a level: the input should be returned unchanged out = compress_timeslice(timesliced_dataarray) @@ -207,21 +210,36 @@ 
def test_compress_timeslice(non_timesliced_dataarray, timeslice): compress_timeslice(timesliced_dataarray, level="day", operation="invalid") -def test_expand_timeslice(timeslice_dataarray): - # Test 1: calling on an already expanded array - # Assert the input is returned unchanged +def test_expand_timeslice(non_timesliced_dataarray, timeslice): + from muse.timeslices import broadcast_timeslice, expand_timeslice + + for level in ["month", "day", "hour"]: + # Create timesliced dataarray for expanding + timesliced_dataarray = broadcast_timeslice( + non_timesliced_dataarray, level=level + ) - # Test 2: invalid operation - # Assert ValueError is raised + # Broadcast operation + out = expand_timeslice(timesliced_dataarray, operation="broadcast") + assert out.timeslice.equals(timeslice.timeslice) + assert ( + out.mean("timeslice") == approx(timesliced_dataarray.mean("timeslice")) + ).all() - # Test 3: broadcast operation - # Assert timeslicing matches the global scheme - # Assert all values are equal to each other - # Assert all values in the output are equal to the input + # Distribute operation + out = expand_timeslice(timesliced_dataarray, operation="distribute") + assert out.timeslice.equals(timeslice.timeslice) + assert ( + out.sum("timeslice") == approx(timesliced_dataarray.sum("timeslice")) + ).all() - # Test 4: distribute operation - # Assert timeslicing matches the global scheme - # Assert all values are in proportion to timeslice length - # Assert sum of output across timeslices is equal to the input + # Calling on an already expanded array: the input should be returned unchanged + out2 = expand_timeslice(out) + assert out.equals(out2) - pass + # Calling with an invalid operation: ValueError should be raised + with raises(ValueError): + timesliced_dataarray = broadcast_timeslice( + non_timesliced_dataarray, level="month" + ) + expand_timeslice(timesliced_dataarray, operation="invalid") From 270ffff12ad21bfadc7f68ab498c2fb635e2b770 Mon Sep 17 00:00:00 2001 From: 
Tom Bland Date: Thu, 14 Nov 2024 17:40:53 +0000 Subject: [PATCH 73/92] Change type hinting to work with 3.9 --- src/muse/agents/agent.py | 4 ++-- src/muse/constraints.py | 12 ++++++------ src/muse/costs.py | 8 ++++---- src/muse/demand_share.py | 12 ++++++------ src/muse/investments.py | 8 ++++++-- src/muse/objectives.py | 12 ++++++------ src/muse/production.py | 6 +++--- src/muse/quantities.py | 18 +++++++++--------- src/muse/sectors/sector.py | 3 ++- src/muse/sectors/subsector.py | 5 +++-- src/muse/timeslices.py | 2 +- 11 files changed, 48 insertions(+), 42 deletions(-) diff --git a/src/muse/agents/agent.py b/src/muse/agents/agent.py index c9706553a..f21e9a7c0 100644 --- a/src/muse/agents/agent.py +++ b/src/muse/agents/agent.py @@ -23,7 +23,7 @@ def __init__( interpolation: str = "linear", category: Optional[str] = None, quantity: Optional[float] = 1, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ): """Creates a standard MUSE agent. @@ -116,7 +116,7 @@ def __init__( asset_threshold: float = 1e-4, quantity: Optional[float] = 1, spend_limit: int = 0, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, **kwargs, ): """Creates a standard agent. diff --git a/src/muse/constraints.py b/src/muse/constraints.py index e77f3a60c..daa49f349 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -248,7 +248,7 @@ def constraints( market: xr.Dataset, technologies: xr.Dataset, year: int | None = None, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> list[Constraint]: if year is None: year = int(market.year.min()) @@ -449,7 +449,7 @@ def max_production( market: xr.Dataset, technologies: xr.Dataset, year: int | None = None, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, **kwargs, ) -> Constraint: """Constructs constraint between capacity and maximum production. 
@@ -515,7 +515,7 @@ def demand_limiting_capacity( market: xr.Dataset, technologies: xr.Dataset, year: int | None = None, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, **kwargs, ) -> Constraint: """Limits the maximum combined capacity to match the demand. @@ -731,7 +731,7 @@ def minimum_service( market: xr.Dataset, technologies: xr.Dataset, year: int | None = None, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, **kwargs, ) -> Constraint | None: """Constructs constraint between capacity and minimum service.""" @@ -775,7 +775,7 @@ def minimum_service( def lp_costs( - technologies: xr.Dataset, costs: xr.DataArray, timeslice_level: str | None = None + technologies: xr.Dataset, costs: xr.DataArray, timeslice_level: Optional[str] = None ) -> xr.Dataset: """Creates costs for solving with scipy's LP solver. @@ -1195,7 +1195,7 @@ def factory( technologies: xr.Dataset, costs: xr.DataArray, *constraints: Constraint, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> ScipyAdapter: lpcosts = lp_costs(technologies, costs, timeslice_level=timeslice_level) diff --git a/src/muse/costs.py b/src/muse/costs.py index bef6b9ab2..0a267ad65 100644 --- a/src/muse/costs.py +++ b/src/muse/costs.py @@ -23,7 +23,7 @@ def net_present_value( capacity: xr.DataArray, production: xr.DataArray, year: int, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Net present value (NPV) of the relevant technologies. @@ -192,7 +192,7 @@ def equivalent_annual_cost( capacity: xr.DataArray, production: xr.DataArray, year: int, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Equivalent annual costs (or annualized cost) of a technology. 
@@ -225,7 +225,7 @@ def lifetime_levelized_cost_of_energy( capacity: xr.DataArray, production: xr.DataArray, year: int, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Levelized cost of energy (LCOE) of technologies over their lifetime. @@ -357,7 +357,7 @@ def annual_levelized_cost_of_energy( prices: xr.DataArray, interpolation: str = "linear", fill_value: Union[int, str] = "extrapolate", - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, **filters, ) -> xr.DataArray: """Undiscounted levelized cost of energy (LCOE) of technologies on each given year. diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index 665dd305c..d42e0fa7f 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -114,7 +114,7 @@ def new_and_retro( technologies: xr.Dataset, current_year: int, forecast: int, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: r"""Splits demand across new and retro agents. @@ -329,7 +329,7 @@ def standard_demand( technologies: xr.Dataset, current_year: int, forecast: int, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: r"""Splits demand across new agents. @@ -439,7 +439,7 @@ def unmet_forecasted_demand( technologies: xr.Dataset, current_year: int, forecast: int, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Forecast demand that cannot be serviced by non-decommissioned current assets.""" from muse.commodities import is_enduse @@ -514,7 +514,7 @@ def unmet_demand( market: xr.Dataset, capacity: xr.DataArray, technologies: xr.Dataset, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ): r"""Share of the demand that cannot be serviced by the existing assets. 
@@ -554,7 +554,7 @@ def new_consumption( technologies: xr.Dataset, current_year: int, forecast: int, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: r"""Computes share of the demand attributed to new agents. @@ -595,7 +595,7 @@ def new_and_retro_demands( technologies: xr.Dataset, current_year: int, forecast: int, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> xr.Dataset: """Splits demand into *new* and *retrofit* demand. diff --git a/src/muse/investments.py b/src/muse/investments.py index d6501da34..a97d61c92 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -394,6 +394,10 @@ def timeslice_max(x: xr.DataArray) -> xr.DataArray: This first annualizes the value in each timeslice by dividing by the fraction of the year that the timeslice occupies, then takes the maximum value """ - from muse.timeslices import TIMESLICE, broadcast_timeslice + from muse.timeslices import TIMESLICE, broadcast_timeslice, compress_timeslice - return (x / (TIMESLICE / broadcast_timeslice(TIMESLICE.sum()))).max("timeslice") + timeslice_level = x.timeslice.to_index().names[-1] + timeslice_fractions = compress_timeslice( + TIMESLICE, level=timeslice_level + ) / broadcast_timeslice(TIMESLICE.sum(), level=timeslice_level) + return (x / timeslice_fractions).max("timeslice") diff --git a/src/muse/objectives.py b/src/muse/objectives.py index bdbce691c..cd9f7a137 100644 --- a/src/muse/objectives.py +++ b/src/muse/objectives.py @@ -63,7 +63,7 @@ def comfort( ] from collections.abc import Mapping, MutableMapping, Sequence -from typing import Any, Callable, Union +from typing import Any, Callable, Optional, Union import numpy as np import xarray as xr @@ -130,7 +130,7 @@ def objectives( technologies: xr.Dataset, demand: xr.DataArray, prices: xr.DataArray, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, *args, **kwargs, ) -> xr.Dataset: @@ -387,7 +387,7 @@ def 
lifetime_levelized_cost_of_energy( technologies: xr.Dataset, demand: xr.DataArray, prices: xr.DataArray, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, *args, **kwargs, ): @@ -428,7 +428,7 @@ def net_present_value( technologies: xr.Dataset, demand: xr.DataArray, prices: xr.DataArray, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, *args, **kwargs, ): @@ -463,7 +463,7 @@ def net_present_cost( technologies: xr.Dataset, demand: xr.DataArray, prices: xr.DataArray, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, *args, **kwargs, ): @@ -497,7 +497,7 @@ def equivalent_annual_cost( technologies: xr.Dataset, demand: xr.DataArray, prices: xr.DataArray, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, *args, **kwargs, ): diff --git a/src/muse/production.py b/src/muse/production.py index 2f1e10bc6..076c818c4 100644 --- a/src/muse/production.py +++ b/src/muse/production.py @@ -38,7 +38,7 @@ def production( "PRODUCTION_SIGNATURE", ] from collections.abc import Mapping, MutableMapping -from typing import Any, Callable, Union, cast +from typing import Any, Callable, Optional, Union, cast import xarray as xr @@ -101,7 +101,7 @@ def maximum_production( market: xr.Dataset, capacity: xr.DataArray, technologies: xr.Dataset, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Production when running at full capacity. @@ -118,7 +118,7 @@ def supply( market: xr.Dataset, capacity: xr.DataArray, technologies: xr.Dataset, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Service current demand equally from all assets. 
diff --git a/src/muse/quantities.py b/src/muse/quantities.py index 74f9c6a8a..d2c34378a 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -18,7 +18,7 @@ def supply( capacity: xr.DataArray, demand: xr.DataArray, technologies: Union[xr.Dataset, xr.DataArray], - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Production and emission for a given capacity servicing a given demand. @@ -120,7 +120,7 @@ def supply( def emission( production: xr.DataArray, fixed_outputs: xr.DataArray, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ): """Computes emission from current products. @@ -154,7 +154,7 @@ def gross_margin( technologies: xr.Dataset, capacity: xr.DataArray, prices: xr.Dataset, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """The percentage of revenue after direct expenses have been subtracted. @@ -237,7 +237,7 @@ def decommissioning_demand( technologies: xr.Dataset, capacity: xr.DataArray, year: Optional[Sequence[int]] = None, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: r"""Computes demand from process decommissioning. @@ -287,7 +287,7 @@ def consumption( technologies: xr.Dataset, production: xr.DataArray, prices: Optional[xr.DataArray] = None, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, **kwargs, ) -> xr.DataArray: """Commodity consumption when fulfilling the whole production. @@ -342,7 +342,7 @@ def consumption( def maximum_production( technologies: xr.Dataset, capacity: xr.DataArray, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, **filters, ): r"""Production for a given capacity. 
@@ -402,7 +402,7 @@ def capacity_in_use( production: xr.DataArray, technologies: xr.Dataset, max_dim: Optional[Union[str, tuple[str]]] = "commodity", - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, **filters, ): """Capacity-in-use for each asset, given production. @@ -456,7 +456,7 @@ def capacity_in_use( def minimum_production( technologies: xr.Dataset, capacity: xr.DataArray, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, **filters, ): r"""Minimum production for a given capacity. @@ -523,7 +523,7 @@ def minimum_production( def capacity_to_service_demand( demand: xr.DataArray, technologies: xr.Dataset, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Minimum capacity required to fulfill the demand.""" from muse.timeslices import broadcast_timeslice, distribute_timeslice diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index 0eb5a8c12..6b6ff6dc7 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -4,6 +4,7 @@ from typing import ( Any, Callable, + Optional, cast, ) @@ -104,7 +105,7 @@ def __init__( interpolation: str = "linear", outputs: Callable | None = None, supply_prod: PRODUCTION_SIGNATURE | None = None, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ): from muse.interactions import factory as interaction_factory from muse.outputs.sector import factory as ofactory diff --git a/src/muse/sectors/subsector.py b/src/muse/sectors/subsector.py index 381a49b2f..784397124 100644 --- a/src/muse/sectors/subsector.py +++ b/src/muse/sectors/subsector.py @@ -4,6 +4,7 @@ from typing import ( Any, Callable, + Optional, ) import numpy as np @@ -26,7 +27,7 @@ def __init__( name: str = "subsector", forecast: int = 5, expand_market_prices: bool = False, - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ): from muse import constraints as cs from muse import 
demand_share as ds @@ -125,7 +126,7 @@ def factory( regions: Sequence[str] | None = None, current_year: int | None = None, name: str = "subsector", - timeslice_level: str | None = None, + timeslice_level: Optional[str] = None, ) -> Subsector: from muse import constraints as cs from muse import demand_share as ds diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index bcc4ac6e3..99ec3c915 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -187,7 +187,7 @@ def expand_timeslice( if operation == "broadcast": mask = mask.where(np.isnan(mask), 1) elif operation == "distribute": - mask /= mask.sum(level_names[level_names.index(current_level) + 1 :]) + mask = mask / mask.sum(level_names[level_names.index(current_level) + 1 :]) else: raise ValueError( f"Unknown operation: {operation}. Must be 'distribute' or 'broadcast'." From 0188872947e57ccb7d79bdb86ec5169d2faa1605 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 14 Nov 2024 17:45:36 +0000 Subject: [PATCH 74/92] Same again --- src/muse/timeslices.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 99ec3c915..c10356e36 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -9,7 +9,7 @@ ] from collections.abc import Mapping, Sequence -from typing import Union +from typing import Union, Optional import numpy as np import pandas as pd @@ -72,7 +72,7 @@ def setup_module(settings: Union[str, Mapping]): def broadcast_timeslice( - x: DataArray, ts: DataArray | None = None, level: str | None = None + x: DataArray, ts: Optional[DataArray] = None, level: Optional[str] = None ): """Convert a non-timesliced array to a timesliced array by broadcasting.""" from xarray import Coordinates @@ -94,7 +94,7 @@ def broadcast_timeslice( return extensive -def distribute_timeslice(x: DataArray, ts: DataArray | None = None, level=None): +def distribute_timeslice(x: DataArray, ts: Optional[DataArray] = None, level=None): 
"""Convert a non-timesliced array to a timesliced array by distribution.""" if ts is None: ts = TIMESLICE @@ -114,8 +114,8 @@ def distribute_timeslice(x: DataArray, ts: DataArray | None = None, level=None): def compress_timeslice( x: DataArray, - ts: DataArray | None = None, - level: str | None = None, + ts: Optional[DataArray] = None, + level: Optional[str] = None, operation: str = "sum", ): """Convert a timesliced array to a lower level by performing the given operation. @@ -162,7 +162,7 @@ def compress_timeslice( def expand_timeslice( - x: DataArray, ts: DataArray | None = None, operation: str = "distribute" + x: DataArray, ts: Optional[DataArray] = None, operation: str = "distribute" ): """Convert a timesliced array to the global scheme by expanding. From 8414c2804b68477fc94dc5806f2931e170c8873b Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 14 Nov 2024 17:47:00 +0000 Subject: [PATCH 75/92] Lint --- src/muse/timeslices.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index c10356e36..83e118e31 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -9,7 +9,7 @@ ] from collections.abc import Mapping, Sequence -from typing import Union, Optional +from typing import Optional, Union import numpy as np import pandas as pd From 1eab8637a20b42a5113135d300abf286238bde41 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 14 Nov 2024 18:12:07 +0000 Subject: [PATCH 76/92] Improve tests --- tests/test_timeslices.py | 52 ++++++++++++++++++++++------------------ 1 file changed, 29 insertions(+), 23 deletions(-) diff --git a/tests/test_timeslices.py b/tests/test_timeslices.py index ccb0722c0..f606a1fd2 100644 --- a/tests/test_timeslices.py +++ b/tests/test_timeslices.py @@ -124,27 +124,29 @@ def non_timesliced_dataarray(): def test_broadcast_timeslice(non_timesliced_dataarray, timeslice): from muse.timeslices import broadcast_timeslice, compress_timeslice - out = 
broadcast_timeslice(non_timesliced_dataarray) + for level in ["month", "day", "hour"]: + out = broadcast_timeslice(non_timesliced_dataarray, level=level) + target_timeslices = compress_timeslice(timeslice, level=level) - # Check that timeslicing in output matches the global scheme - assert out.timeslice.equals(timeslice.timeslice) + # Check that timeslicing in output matches the global scheme + assert out.timeslice.equals(target_timeslices.timeslice) - # Check that all timeslices in the output are equal to each other - assert (out.diff(dim="timeslice") == 0).all() + # Check that all timeslices in the output are equal to each other + assert (out.diff(dim="timeslice") == 0).all() - # Check that all values in the output are equal to the input - assert all( - (out.isel(timeslice=i) == non_timesliced_dataarray).all() - for i in range(out.sizes["timeslice"]) - ) + # Check that all values in the output are equal to the input + assert all( + (out.isel(timeslice=i) == non_timesliced_dataarray).all() + for i in range(out.sizes["timeslice"]) + ) - # Calling on an already timesliced array: the input should be returned unchanged + # Calling on a fully timesliced array: the input should be returned unchanged out2 = broadcast_timeslice(out) assert out2.equals(out) # Calling on an array with inappropraite timeslicing: ValueError should be raised with raises(ValueError): - broadcast_timeslice(compress_timeslice(timeslice, level="day")) + broadcast_timeslice(compress_timeslice(out, level="day")) def test_distribute_timeslice(non_timesliced_dataarray, timeslice): @@ -154,26 +156,30 @@ def test_distribute_timeslice(non_timesliced_dataarray, timeslice): distribute_timeslice, ) - out = distribute_timeslice(non_timesliced_dataarray) + for level in ["month", "day", "hour"]: + out = distribute_timeslice(non_timesliced_dataarray, level=level) + target_timeslices = compress_timeslice(timeslice, level=level) - # Check that timeslicing in output matches the global scheme - assert 
out.timeslice.equals(timeslice.timeslice) + # Check that timeslicing in output matches the global scheme + assert out.timeslice.equals(target_timeslices.timeslice) - # Check that all values are proportional to timeslice lengths - out_proportions = out / broadcast_timeslice(out.sum("timeslice")) - ts_proportions = timeslice / broadcast_timeslice(timeslice.sum("timeslice")) - assert abs(out_proportions - ts_proportions).max() < 1e-6 + # Check that all values are proportional to timeslice lengths + out_proportions = out / broadcast_timeslice(out.sum("timeslice"), level=level) + ts_proportions = target_timeslices / broadcast_timeslice( + target_timeslices.sum("timeslice"), level=level + ) + assert abs(out_proportions - ts_proportions).max() < 1e-6 - # Check that the sum across timeslices is equal to the input - assert (out.sum("timeslice") == approx(non_timesliced_dataarray)).all() + # Check that the sum across timeslices is equal to the input + assert (out.sum("timeslice") == approx(non_timesliced_dataarray)).all() - # Calling on an already timesliced array: the input should be returned unchanged + # Calling on a fully timesliced array: the input should be returned unchanged out2 = distribute_timeslice(out) assert out2.equals(out) # Calling on an array with inappropraite timeslicing: ValueError should be raised with raises(ValueError): - distribute_timeslice(compress_timeslice(timeslice, level="day")) + distribute_timeslice(compress_timeslice(out, level="day")) def test_compress_timeslice(non_timesliced_dataarray, timeslice): From 7901d1ae0eec18df6a18cf3b4aa755e4e4c210a3 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 15 Nov 2024 11:29:46 +0000 Subject: [PATCH 77/92] Add parameter descriptions --- src/muse/agents/agent.py | 6 ++++++ src/muse/costs.py | 4 ++++ src/muse/demand_share.py | 2 ++ src/muse/quantities.py | 5 +++++ src/muse/timeslices.py | 7 +++++++ 5 files changed, 24 insertions(+) diff --git a/src/muse/agents/agent.py b/src/muse/agents/agent.py index 
f21e9a7c0..474212298 100644 --- a/src/muse/agents/agent.py +++ b/src/muse/agents/agent.py @@ -40,6 +40,9 @@ def __init__( together. quantity: optional value to classify different agents' share of the population. + timeslice_level: the timeslice level over which investments/production + will be optimized (e.g "hour", "day"). If None, the agent will use the + finest timeslice level. """ from uuid import uuid4 @@ -144,6 +147,9 @@ def __init__( asset_threshold: Threshold below which assets are not added. quantity: different agents' share of the population spend_limit: The cost above which agents will not invest + timeslice_level: the timeslice level over which the agent investments will + be optimized (e.g "hour", "day"). If None, the agent will use the finest + timeslice level. **kwargs: Extra arguments """ from muse.decisions import factory as decision_factory diff --git a/src/muse/costs.py b/src/muse/costs.py index 0a267ad65..cd6084003 100644 --- a/src/muse/costs.py +++ b/src/muse/costs.py @@ -52,6 +52,7 @@ def net_present_value( capacity: xr.DataArray with the capacity of the relevant technologies production: xr.DataArray with the production of the relevant technologies year: int, the year of the forecast + timeslice_level: the desired timeslice level of the result (e.g. "hour", "day") Return: xr.DataArray with the NPV calculated for the relevant technologies @@ -210,6 +211,7 @@ def equivalent_annual_cost( capacity: xr.DataArray with the capacity of the relevant technologies production: xr.DataArray with the production of the relevant technologies year: int, the year of the forecast + timeslice_level: the desired timeslice level of the result (e.g. 
"hour", "day") Return: xr.DataArray with the EAC calculated for the relevant technologies @@ -237,6 +239,7 @@ def lifetime_levelized_cost_of_energy( capacity: xr.DataArray with the capacity of the relevant technologies production: xr.DataArray with the production of the relevant technologies year: int, the year of the forecast + timeslice_level: the desired timeslice level of the result (e.g. "hour", "day") Return: xr.DataArray with the LCOE calculated for the relevant technologies @@ -385,6 +388,7 @@ def annual_levelized_cost_of_energy( This dataarray contains at least timeslice and commodity dimensions. interpolation: interpolation method. fill_value: Fill value for values outside the extrapolation range. + timeslice_level: the desired timeslice level of the result (e.g. "hour", "day") **filters: Anything by which prices can be filtered. Return: diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index d42e0fa7f..e8cf0d458 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -133,6 +133,7 @@ def new_and_retro( technologies: quantities describing the technologies. current_year: Current year of simulation forecast: How many years to forecast ahead + timeslice_level: the timeslice level of the sector (e.g. "hour", "day") Pseudo-code: @@ -348,6 +349,7 @@ def standard_demand( technologies: quantities describing the technologies. current_year: Current year of simulation forecast: How many years to forecast ahead + timeslice_level: the timeslice level of the sector (e.g. "hour", "day") """ from functools import partial diff --git a/src/muse/quantities.py b/src/muse/quantities.py index d2c34378a..b4e5c8a9a 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -35,6 +35,7 @@ def supply( exceed its share of the demand. technologies: factors bindings the capacity of an asset with its production of commodities and environmental pollutants. + timeslice_level: the desired timeslice level of the result (e.g. 
"hour", "day") Return: A data array where the commodity dimension only contains actual outputs (i.e. no @@ -131,6 +132,7 @@ def emission( when computing emissions. fixed_outputs: factor relating total production to emissions. For convenience, this can also be a `technologies` dataset containing `fixed_output`. + timeslice_level: the desired timeslice level of the result (e.g. "hour", "day") Return: A data array containing emissions (and only emissions). @@ -372,6 +374,7 @@ def maximum_production( technologies. Filters not relevant to the quantities of interest, i.e. filters that are not a dimension of `capacity` or `technologies`, are silently ignored. + timeslice_level: the desired timeslice level of the result (e.g. "hour", "day") Return: `capacity * fixed_outputs * utilization_factor`, whittled down according to the @@ -420,6 +423,7 @@ def capacity_in_use( technologies. Filters not relevant to the quantities of interest, i.e. filters that are not a dimension of `capacity` or `technologies`, are silently ignored. + timeslice_level: the desired timeslice level of the result (e.g. "hour", "day") Return: Capacity-in-use for each technology, whittled down by the filters. @@ -486,6 +490,7 @@ def minimum_production( technologies. Filters not relevant to the quantities of interest, i.e. filters that are not a dimension of `capacity` or `technologies`, are silently ignored. + timeslice_level: the desired timeslice level of the result (e.g. 
"hour", "day") Return: `capacity * fixed_outputs * minimum_service_factor`, whittled down according to diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 83e118e31..91a9f22fb 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -211,3 +211,10 @@ def drop_timeslice(data: DataArray) -> DataArray: return data return data.drop_vars(data.timeslice.indexes) + + +def get_level(data: DataArray) -> str: + """Get the timeslice level of a DataArray.""" + if "timeslice" not in data.dims: + raise ValueError("DataArray does not have a 'timeslice' dimension.") + return data.timeslice.to_index().names[-1] From 6dadde12280e900d481c18f30515db24a49b6043 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 15 Nov 2024 14:49:20 +0000 Subject: [PATCH 78/92] Docstrings for new functions, use get_level --- src/muse/investments.py | 9 ++- src/muse/timeslices.py | 115 +++++++++++++++++++++++++++++---------- tests/test_timeslices.py | 6 +- 3 files changed, 96 insertions(+), 34 deletions(-) diff --git a/src/muse/investments.py b/src/muse/investments.py index a97d61c92..72f1209cc 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -394,9 +394,14 @@ def timeslice_max(x: xr.DataArray) -> xr.DataArray: This first annualizes the value in each timeslice by dividing by the fraction of the year that the timeslice occupies, then takes the maximum value """ - from muse.timeslices import TIMESLICE, broadcast_timeslice, compress_timeslice + from muse.timeslices import ( + TIMESLICE, + broadcast_timeslice, + compress_timeslice, + get_level, + ) - timeslice_level = x.timeslice.to_index().names[-1] + timeslice_level = get_level(x) timeslice_fractions = compress_timeslice( TIMESLICE, level=timeslice_level ) / broadcast_timeslice(TIMESLICE.sum(), level=timeslice_level) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 91a9f22fb..969b42ce2 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -67,14 +67,22 @@ def read_timeslices( 
def setup_module(settings: Union[str, Mapping]): """Sets up module singletons.""" global TIMESLICE - TIMESLICE = read_timeslices(settings) def broadcast_timeslice( x: DataArray, ts: Optional[DataArray] = None, level: Optional[str] = None -): - """Convert a non-timesliced array to a timesliced array by broadcasting.""" +) -> DataArray: + """Convert a non-timesliced array to a timesliced array by broadcasting. + + If x is already timesliced in the appropriate scheme, it will be returned unchanged. + + Args: + x: Array to broadcast. + ts: Dataarray with timeslice lengths. If None, defaults to the global timeslice. + level: Level to broadcast to. If None, use the finest level of ts. + + """ from xarray import Coordinates if ts is None: @@ -90,12 +98,23 @@ def broadcast_timeslice( raise ValueError("x has incompatible timeslicing.") mindex_coords = Coordinates.from_pandas_multiindex(ts.timeslice, "timeslice") - extensive = x.expand_dims(timeslice=ts["timeslice"]).assign_coords(mindex_coords) - return extensive + broadcasted = x.expand_dims(timeslice=ts["timeslice"]).assign_coords(mindex_coords) + return broadcasted -def distribute_timeslice(x: DataArray, ts: Optional[DataArray] = None, level=None): - """Convert a non-timesliced array to a timesliced array by distribution.""" +def distribute_timeslice( + x: DataArray, ts: Optional[DataArray] = None, level=None +) -> DataArray: + """Convert a non-timesliced array to a timesliced array by distribution. + + If x is already timesliced in the appropriate scheme, it will be returned unchanged. + + Args: + x: Array to distribute. + ts: Dataarray with timeslice lengths. If None, defaults to the global timeslice. + level: Level to distribute to. If None, use the finest level of ts. 
+ + """ if ts is None: ts = TIMESLICE @@ -108,8 +127,9 @@ def distribute_timeslice(x: DataArray, ts: Optional[DataArray] = None, level=Non return x raise ValueError("x has incompatible timeslicing.") - extensive = broadcast_timeslice(x, ts) - return extensive * (ts / broadcast_timeslice(ts.sum(), ts)) + broadcasted = broadcast_timeslice(x, ts) + timeslice_fractions = ts / broadcast_timeslice(ts.sum(), ts) + return broadcasted * timeslice_fractions def compress_timeslice( @@ -117,25 +137,42 @@ def compress_timeslice( ts: Optional[DataArray] = None, level: Optional[str] = None, operation: str = "sum", -): - """Convert a timesliced array to a lower level by performing the given operation. +) -> DataArray: + """Convert a fully timesliced array to a coarser level. + + The operation can be either 'sum', or 'mean': + - sum: sum values at each compressed timeslice level + - mean: take a weighted average of values at each compressed timeslice level + + Args: + x: Timesliced array to compress. Must have the same timeslicing as ts. + ts: Dataarray with timeslice lengths. If None, defaults to the global timeslice. + level: Level to compress to. If None, don't compress. + operation: Operation to perform ("sum" or "mean"). Defaults to "sum". - The operation can be either 'sum', or 'mean' """ if ts is None: ts = TIMESLICE + # Raise error if x is not timesliced + if "timeslice" not in x.dims: + raise ValueError("DataArray must have a 'timeslice' dimension.") + # If level is not specified, don't compress if level is None: return x - # Get level names from x - level_names = x.timeslice.to_index().names - if level not in level_names: - raise ValueError(f"Unknown level: {level}. 
Must be one of {level_names}.") - current_level, coarser_levels = level_names[-1], level_names[:-1] + # x must have the same timeslicing as ts + if not x.timeslice.reset_coords(drop=True).equals(ts.timeslice): + raise ValueError("x has incompatible timeslicing.") - # Return if already at the desired level + # level must be a valid timeslice level + x_levels = x.timeslice.to_index().names + if level not in x_levels: + raise ValueError(f"Unknown level: {level}. Must be one of {x_levels}.") + current_level, coarser_levels = x_levels[-1], x_levels[:-1] + + # Return x unchanged if already at the desired level if current_level == level: return x @@ -163,22 +200,42 @@ def compress_timeslice( def expand_timeslice( x: DataArray, ts: Optional[DataArray] = None, operation: str = "distribute" -): - """Convert a timesliced array to the global scheme by expanding. +) -> DataArray: + """Convert a timesliced array to a finer level. The operation can be either 'distribute', or 'broadcast' - - distribute: distribute the values according to timeslice length - - broadcast: broadcast the values across the new timeslice level + - distribute: distribute values over the new timeslice level(s) according to + timeslice lengths, such that the sum of the output over all timeslices is equal + to the sum of the input + - broadcast: broadcast values across the new timeslice level(s) + + Args: + x: Timesliced array to expand. + ts: Dataarray with timeslice lengths. If None, defaults to the global timeslice. + operation: Operation to perform ("distribute" or "broadcast"). + Defaults to "distribute". 
+ """ if ts is None: ts = TIMESLICE - # Get level names from ts - level_names = ts.timeslice.to_index().names + # Raise error if x is not timesliced + if "timeslice" not in x.dims: + raise ValueError("DataArray must have a 'timeslice' dimension.") + + # Get level names + ts_levels = ts.timeslice.to_index().names + x_levels = x.timeslice.to_index().names + + # Raise error if x_level is not a subset of ts_levels + if not set(x_levels).issubset(ts_levels): + raise ValueError( + f"Timeslice levels of x ({x_levels}) must be a subset of ts ({ts_levels})." + ) - # Return if already at the finest level - finest_level = level_names[-1] - current_level = x.timeslice.to_index().names[-1] + # Return x unchanged if already at the desired level + finest_level = get_level(ts) + current_level = get_level(x) if current_level == finest_level: return x @@ -187,7 +244,7 @@ def expand_timeslice( if operation == "broadcast": mask = mask.where(np.isnan(mask), 1) elif operation == "distribute": - mask = mask / mask.sum(level_names[level_names.index(current_level) + 1 :]) + mask = mask / mask.sum(ts_levels[ts_levels.index(current_level) + 1 :]) else: raise ValueError( f"Unknown operation: {operation}. Must be 'distribute' or 'broadcast'." 
@@ -196,7 +253,7 @@ def expand_timeslice( # Perform the operation return ( (x.unstack(dim="timeslice") * mask) - .stack(timeslice=level_names) + .stack(timeslice=ts_levels) .dropna("timeslice") .sel(timeslice=ts.timeslice) ) diff --git a/tests/test_timeslices.py b/tests/test_timeslices.py index f606a1fd2..687da7122 100644 --- a/tests/test_timeslices.py +++ b/tests/test_timeslices.py @@ -183,7 +183,7 @@ def test_distribute_timeslice(non_timesliced_dataarray, timeslice): def test_compress_timeslice(non_timesliced_dataarray, timeslice): - from muse.timeslices import broadcast_timeslice, compress_timeslice + from muse.timeslices import broadcast_timeslice, compress_timeslice, get_level # Create timesliced dataarray for compressing timesliced_dataarray = broadcast_timeslice(non_timesliced_dataarray) @@ -191,14 +191,14 @@ def test_compress_timeslice(non_timesliced_dataarray, timeslice): for level in ["month", "day", "hour"]: # Sum operation out = compress_timeslice(timesliced_dataarray, operation="sum", level=level) - assert out.timeslice.to_index().names[-1] == level + assert get_level(out) == level assert ( out.sum("timeslice") == approx(timesliced_dataarray.sum("timeslice")) ).all() # Mean operation out = compress_timeslice(timesliced_dataarray, operation="mean", level=level) - assert out.timeslice.to_index().names[-1] == level + assert get_level(out) == level assert ( out.mean("timeslice") == approx(timesliced_dataarray.mean("timeslice")) ).all() From 5904acd42f0ff4ff90704b7f6db309da7eaf1311 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 15 Nov 2024 15:22:33 +0000 Subject: [PATCH 79/92] Add documentation --- docs/inputs/toml.rst | 52 +++++++++++++++++++++--------------------- src/muse/timeslices.py | 20 ++++++---------- 2 files changed, 33 insertions(+), 39 deletions(-) diff --git a/docs/inputs/toml.rst b/docs/inputs/toml.rst index 6cadb7b8f..972cfa5bd 100644 --- a/docs/inputs/toml.rst +++ b/docs/inputs/toml.rst @@ -319,6 +319,32 @@ A sector accepts these 
attributes: Additional methods can be registered with :py:func:`muse.production.register_production` +*technodata* + Path to a csv file containing the characterization of the technologies involved in + the sector, e.g. lifetime, capital costs, etc... See :ref:`inputs-technodata`. + +*technodata_timeslices* + Optional. Path to a csv file describing the utilization factor and minimum service + factor of each technology in each timeslice. + See :ref:`user_guide/inputs/technodata_timeslices`. + +*commodities_in* + Path to a csv file describing the inputs of each technology involved in the sector. + See :ref:`inputs-iocomms`. + +*commodities_out* + Path to a csv file describing the outputs of each technology involved in the sector. + See :ref:`inputs-iocomms`. + +*timeslice_level* + Optional. This represents the level of timeslice granularity over which commodity + flows out of the sector are balanced (e.g. if "day", the sector will aim to meet + commodity demands on a daily basis, rather than an hourly basis). + If not given, defaults to the finest level defined in the global `timeslices` section. + Note: If *technodata_timeslices* is used, the data in this file must match the timeslice + level of the sector (e.g. with global timeslice levels "month", "day" and "hour", if a sector has "day" as + the timeslice level, then *technodata_timeslices* must have columns "month" and "day", but not "hour") + Sectors contain a number of subsections: *interactions* Defines interactions between agents. These interactions take place right before new @@ -372,32 +398,6 @@ Sectors contain a number of subsections: "new_to_retro" type of network has been defined but no retro agents are included in the sector. -*technodata* - - Defines technologies and their features, in terms of costs, efficiencies, and emissions. - - *technodata* are specified as an inline TOML table, e.g. with single - brackets. A technodata section would look like: - - .. 
code-block:: TOML - - [sectors.residential.technodata] - technodata = '{path}/technodata/residential/Technodata.csv' - commodities_in = '{path}/technodata/residential/CommIn.csv' - commodities_out = '{path}/technodata/residential/CommOut.csv' - - Where: - *technodata* - Path to a csv file containing the characterization of the technologies involved in - the sector, e.g. lifetime, capital costs, etc... See :ref:`inputs-technodata`. - - *commodities_in* - Path to a csv file describing the inputs of each technology involved in the sector. - See :ref:`inputs-iocomms`. - - *commodities_out* - Path to a csv file describing the outputs of each technology involved in the sector. - See :ref:`inputs-iocomms`. *subsectors* diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 969b42ce2..5cb00b160 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -42,27 +42,23 @@ def read_timeslices( levels = [(*previous, b) for previous, a in zip(levels, ts) for b in a] ts = reduce(list.__add__, (list(u.values()) for u in ts), []) + # Prepare multiindex nln = min(len(levels[0]), len(level_names)) level_names = ( list(level_names[:nln]) + [str(i) for i in range(len(levels[0]))][nln:] ) indices = pd.MultiIndex.from_tuples(levels, names=level_names) + # Make sure names from different levels don't overlap if any( reduce(set.union, indices.levels[:i], set()).intersection(indices.levels[i]) for i in range(1, indices.nlevels) ): raise ValueError("Names from different levels should not overlap.") + # Create DataArray return DataArray(ts, coords={"timeslice": indices}, dims="timeslice") - # # Create DataFrame - # df = pd.DataFrame(ts, columns=["value"]) - # df["level"] = levels - # df[level_names] = pd.DataFrame(df["level"].tolist(), index=df.index) - # df = df.drop("level", axis=1).set_index(level_names) - # return df - def setup_module(settings: Union[str, Mapping]): """Sets up module singletons.""" @@ -154,18 +150,16 @@ def compress_timeslice( if ts is None: ts = 
TIMESLICE - # Raise error if x is not timesliced + # Raise error if x is not timesliced appropriately if "timeslice" not in x.dims: - raise ValueError("DataArray must have a 'timeslice' dimension.") + raise ValueError("x must have a 'timeslice' dimension.") + if not x.timeslice.reset_coords(drop=True).equals(ts.timeslice): + raise ValueError("x has incompatible timeslicing.") # If level is not specified, don't compress if level is None: return x - # x must have the same timeslicing as ts - if not x.timeslice.reset_coords(drop=True).equals(ts.timeslice): - raise ValueError("x has incompatible timeslicing.") - # level must be a valid timeslice level x_levels = x.timeslice.to_index().names if level not in x_levels: From ddeedeb6aeb62f2b4cafedabaa8c7bac7347b612 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 15:25:06 +0000 Subject: [PATCH 80/92] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- docs/inputs/toml.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/inputs/toml.rst b/docs/inputs/toml.rst index 972cfa5bd..058b64e20 100644 --- a/docs/inputs/toml.rst +++ b/docs/inputs/toml.rst @@ -337,8 +337,8 @@ A sector accepts these attributes: See :ref:`inputs-iocomms`. *timeslice_level* - Optional. This represents the level of timeslice granularity over which commodity - flows out of the sector are balanced (e.g. if "day", the sector will aim to meet + Optional. This represents the level of timeslice granularity over which commodity + flows out of the sector are balanced (e.g. if "day", the sector will aim to meet commodity demands on a daily basis, rather than an hourly basis). If not given, defaults to the finest level defined in the global `timeslices` section. 
Note: If *technodata_timeslices* is used, the data in this file must match the timeslice From d57d324f986c69cca822c833a69fc5f780665e3f Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Fri, 15 Nov 2024 16:55:59 +0000 Subject: [PATCH 81/92] Finish compress_timeslice (still small todo) --- docs/inputs/toml.rst | 4 ++-- src/muse/timeslices.py | 30 +++++++++++++----------------- tests/test_timeslices.py | 3 ++- 3 files changed, 17 insertions(+), 20 deletions(-) diff --git a/docs/inputs/toml.rst b/docs/inputs/toml.rst index 972cfa5bd..058b64e20 100644 --- a/docs/inputs/toml.rst +++ b/docs/inputs/toml.rst @@ -337,8 +337,8 @@ A sector accepts these attributes: See :ref:`inputs-iocomms`. *timeslice_level* - Optional. This represents the level of timeslice granularity over which commodity - flows out of the sector are balanced (e.g. if "day", the sector will aim to meet + Optional. This represents the level of timeslice granularity over which commodity + flows out of the sector are balanced (e.g. if "day", the sector will aim to meet commodity demands on a daily basis, rather than an hourly basis). If not given, defaults to the finest level defined in the global `timeslices` section. Note: If *technodata_timeslices* is used, the data in this file must match the timeslice diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 5cb00b160..8f45fc320 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -164,32 +164,28 @@ def compress_timeslice( x_levels = x.timeslice.to_index().names if level not in x_levels: raise ValueError(f"Unknown level: {level}. 
Must be one of {x_levels}.") - current_level, coarser_levels = x_levels[-1], x_levels[:-1] # Return x unchanged if already at the desired level - if current_level == level: + if get_level(x) == level: return x - # Perform the operation over one timeslice level + # Prepare mask + idx = x_levels.index(level) + kept_levels, compressed_levels = x_levels[: idx + 1], x_levels[idx + 1 :] + mask = ts.unstack(dim="timeslice") if operation == "sum": - x = ( - x.unstack(dim="timeslice") - .sum(current_level) - .stack(timeslice=coarser_levels) - ) - # return x.unstack(dim="timeslice").sum(["hour"]).stack(timeslice=["month", "day"]) + mask = mask.where(np.isnan(mask), 1) elif operation == "mean": - # TODO: This should be a weighted mean according to timeslice length - x = ( - x.unstack(dim="timeslice") - .mean(current_level) - .stack(timeslice=coarser_levels) - ) + mask = mask / mask.sum(compressed_levels) else: raise ValueError(f"Unknown operation: {operation}. Must be 'sum' or 'mean'.") - # Recurse - return compress_timeslice(x, ts=ts, level=level, operation=operation) + # Perform the operation + return ( + (x.unstack(dim="timeslice") * mask) + .sum(compressed_levels) + .stack(timeslice=kept_levels) + ) # TODO: this is messing up the order of timeslices def expand_timeslice( diff --git a/tests/test_timeslices.py b/tests/test_timeslices.py index 687da7122..925beba89 100644 --- a/tests/test_timeslices.py +++ b/tests/test_timeslices.py @@ -201,7 +201,8 @@ def test_compress_timeslice(non_timesliced_dataarray, timeslice): assert get_level(out) == level assert ( out.mean("timeslice") == approx(timesliced_dataarray.mean("timeslice")) - ).all() + ).all() # NB in general this should be a weighted mean, but this works here + # because the data is equal in every timeslice # Calling without specifying a level: the input should be returned unchanged out = compress_timeslice(timesliced_dataarray) From 4fd085c107808c4e3364810d74f63c2ef59a3315 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: 
Fri, 15 Nov 2024 17:55:00 +0000 Subject: [PATCH 82/92] sort_timeslice and timeslice_max functions --- src/muse/investments.py | 19 +------------------ src/muse/readers/csv.py | 5 ++--- src/muse/timeslices.py | 27 +++++++++++++++++++++++++-- 3 files changed, 28 insertions(+), 23 deletions(-) diff --git a/src/muse/investments.py b/src/muse/investments.py index 72f1209cc..60357ea0c 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -64,6 +64,7 @@ def investment( from muse.errors import GrowthOfCapacityTooConstrained from muse.outputs.cache import cache_quantity from muse.registration import registrator +from muse.timeslices import timeslice_max INVESTMENT_SIGNATURE = Callable[ [xr.DataArray, xr.DataArray, xr.Dataset, list[Constraint], KwArg(Any)], @@ -388,21 +389,3 @@ def default_to_scipy(): return solution -def timeslice_max(x: xr.DataArray) -> xr.DataArray: - """Find the max value over the timeslice dimension, normlaized for timeslice length. - - This first annualizes the value in each timeslice by dividing by the fraction of the - year that the timeslice occupies, then takes the maximum value - """ - from muse.timeslices import ( - TIMESLICE, - broadcast_timeslice, - compress_timeslice, - get_level, - ) - - timeslice_level = get_level(x) - timeslice_fractions = compress_timeslice( - TIMESLICE, level=timeslice_level - ) / broadcast_timeslice(TIMESLICE.sum(), level=timeslice_level) - return (x / timeslice_fractions).max("timeslice") diff --git a/src/muse/readers/csv.py b/src/muse/readers/csv.py index c3dc436e5..c202e4ff4 100644 --- a/src/muse/readers/csv.py +++ b/src/muse/readers/csv.py @@ -136,7 +136,7 @@ def to_agent_share(name): def read_technodata_timeslices(filename: Union[str, Path]) -> xr.Dataset: from muse.readers import camel_to_snake - from muse.timeslices import TIMESLICE + from muse.timeslices import sort_timeslice csv = pd.read_csv(filename, float_precision="high", low_memory=False) csv = csv.rename(columns=camel_to_snake) @@ -170,8 
+170,7 @@ def read_technodata_timeslices(filename: Union[str, Path]) -> xr.Dataset: if item not in ["technology", "region", "year"] ] result = result.stack(timeslice=timeslice_levels) - result = result.sel(timeslice=TIMESLICE.timeslice) - # sorts timeslices into the correct order + result = sort_timeslice(result) return result diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 8f45fc320..f3712b3ce 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -241,12 +241,12 @@ def expand_timeslice( ) # Perform the operation - return ( + result = ( (x.unstack(dim="timeslice") * mask) .stack(timeslice=ts_levels) .dropna("timeslice") - .sel(timeslice=ts.timeslice) ) + return sort_timeslices(result, ts) def drop_timeslice(data: DataArray) -> DataArray: @@ -265,3 +265,26 @@ def get_level(data: DataArray) -> str: if "timeslice" not in data.dims: raise ValueError("DataArray does not have a 'timeslice' dimension.") return data.timeslice.to_index().names[-1] + + +def sort_timeslices(data: DataArray, ts: Optional[DataArray] = None) -> DataArray: + """Sorts the timeslices of a DataArray according to a reference timeslice.""" + if ts is None: + ts = TIMESLICE + return data.sel(timeslice=ts.timeslice) + + +def timeslice_max(x: DataArray, ts: Optional[DataArray] = None) -> DataArray: + """Find the max value over the timeslice dimension, normlaized for timeslice length. 
+ + This first annualizes the value in each timeslice by dividing by the fraction of the + year that the timeslice occupies, then takes the maximum value + """ + if ts is None: + ts = TIMESLICE + + timeslice_level = get_level(x) + timeslice_fractions = compress_timeslice( + ts, level=timeslice_level + ) / broadcast_timeslice(ts.sum(), level=timeslice_level) + return (x / timeslice_fractions).max("timeslice") From dcbed0dbd04aba626eeb256da1d5a262cdd617bd Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 17:56:50 +0000 Subject: [PATCH 83/92] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/muse/investments.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/muse/investments.py b/src/muse/investments.py index 60357ea0c..39f565cd5 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -387,5 +387,3 @@ def default_to_scipy(): solution = cast(Callable[[np.ndarray], xr.Dataset], adapter.to_muse)(list(res["x"])) return solution - - From 10147512c717939889844b5d41c8ba429aa0e4ff Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 18 Nov 2024 14:02:27 +0000 Subject: [PATCH 84/92] New tests, some robustness improvements --- src/muse/readers/csv.py | 5 +- src/muse/timeslices.py | 26 ++++-- tests/test_timeslices.py | 192 +++++++++++++++++++++++---------------- 3 files changed, 132 insertions(+), 91 deletions(-) diff --git a/src/muse/readers/csv.py b/src/muse/readers/csv.py index c202e4ff4..d9fbd7cfc 100644 --- a/src/muse/readers/csv.py +++ b/src/muse/readers/csv.py @@ -136,7 +136,7 @@ def to_agent_share(name): def read_technodata_timeslices(filename: Union[str, Path]) -> xr.Dataset: from muse.readers import camel_to_snake - from muse.timeslices import sort_timeslice + from muse.timeslices import sort_timeslices csv = pd.read_csv(filename, float_precision="high", low_memory=False) csv = csv.rename(columns=camel_to_snake) 
@@ -170,8 +170,7 @@ def read_technodata_timeslices(filename: Union[str, Path]) -> xr.Dataset: if item not in ["technology", "region", "year"] ] result = result.stack(timeslice=timeslice_levels) - result = sort_timeslice(result) - return result + return sort_timeslices(result) def read_io_technodata(filename: Union[str, Path]) -> xr.Dataset: diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index f3712b3ce..06f94067a 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -85,7 +85,7 @@ def broadcast_timeslice( ts = TIMESLICE if level is not None: - ts = compress_timeslice(ts, level=level, operation="sum") + ts = compress_timeslice(ts, ts=ts, level=level, operation="sum") # If x already has timeslices, check that it matches the reference timeslice. if "timeslice" in x.dims: @@ -115,7 +115,7 @@ def distribute_timeslice( ts = TIMESLICE if level is not None: - ts = compress_timeslice(ts, level=level, operation="sum") + ts = compress_timeslice(ts, ts=ts, level=level, operation="sum") # If x already has timeslices, check that it matches the reference timeslice. if "timeslice" in x.dims: @@ -123,8 +123,8 @@ def distribute_timeslice( return x raise ValueError("x has incompatible timeslicing.") - broadcasted = broadcast_timeslice(x, ts) - timeslice_fractions = ts / broadcast_timeslice(ts.sum(), ts) + broadcasted = broadcast_timeslice(x, ts=ts) + timeslice_fractions = ts / broadcast_timeslice(ts.sum(), ts=ts) return broadcasted * timeslice_fractions @@ -181,11 +181,12 @@ def compress_timeslice( raise ValueError(f"Unknown operation: {operation}. 
Must be 'sum' or 'mean'.") # Perform the operation - return ( + result = ( (x.unstack(dim="timeslice") * mask) .sum(compressed_levels) .stack(timeslice=kept_levels) - ) # TODO: this is messing up the order of timeslices + ) + return sort_timeslices(result, ts) def expand_timeslice( @@ -271,11 +272,16 @@ def sort_timeslices(data: DataArray, ts: Optional[DataArray] = None) -> DataArra """Sorts the timeslices of a DataArray according to a reference timeslice.""" if ts is None: ts = TIMESLICE - return data.sel(timeslice=ts.timeslice) + + # If data is at the finest timeslice level, sort timeslices according to ts + if get_level(data) == get_level(ts): + return data.sel(timeslice=ts.timeslice) + # Otherwise, sort timeslices in alphabetical order + return data.sortby("timeslice") def timeslice_max(x: DataArray, ts: Optional[DataArray] = None) -> DataArray: - """Find the max value over the timeslice dimension, normlaized for timeslice length. + """Find the max value over the timeslice dimension, normalized for timeslice length. 
This first annualizes the value in each timeslice by dividing by the fraction of the year that the timeslice occupies, then takes the maximum value @@ -285,6 +291,6 @@ def timeslice_max(x: DataArray, ts: Optional[DataArray] = None) -> DataArray: timeslice_level = get_level(x) timeslice_fractions = compress_timeslice( - ts, level=timeslice_level - ) / broadcast_timeslice(ts.sum(), level=timeslice_level) + ts, ts=ts, level=timeslice_level, operation="sum" + ) / broadcast_timeslice(ts.sum(), ts=ts, level=timeslice_level) return (x / timeslice_fractions).max("timeslice") diff --git a/tests/test_timeslices.py b/tests/test_timeslices.py index 925beba89..0a3ae55a2 100644 --- a/tests/test_timeslices.py +++ b/tests/test_timeslices.py @@ -1,55 +1,17 @@ """Test timeslice utilities.""" +import numpy as np from pytest import approx, fixture, raises from xarray import DataArray @fixture -def toml(): - return """ - ["timeslices"] - winter.weekday.day = 10 - winter.weekday.night = 5 - winter.weekend.day = 2 - winter.weekend.night = 2 - winter.weekend.dusk = 1 - summer.weekday.day = 5 - summer.weekday.night = 5 - summer.weekend.day = 2 - summer.weekend.night = 2 - summer.weekend.dusk = 1 - level_names = ["semester", "week", "day"] - """ - - -@fixture -def reference(toml): - from muse.timeslices import read_timeslices - - return read_timeslices(toml) +def non_timesliced_dataarray(): + return DataArray([1, 2, 3], dims=["x"]) @fixture -def timeslice_dataarray(reference): - from pandas import MultiIndex - - return DataArray( - [1, 2, 3], - coords={ - "timeslice": MultiIndex.from_tuples( - [ - ("winter", "weekday", "allday"), - ("winter", "weekend", "dusk"), - ("summer", "weekend", "night"), - ], - names=reference.get_index("timeslice").names, - ) - }, - dims="timeslice", - ) - - -def test_read_timeslices(): +def timeslice(): from toml import loads from muse.timeslices import read_timeslices @@ -81,6 +43,7 @@ def test_read_timeslices(): ts = read_timeslices(inputs) assert isinstance(ts, 
DataArray) assert "timeslice" in ts.coords + return ts def test_no_overlap(): @@ -102,31 +65,30 @@ def test_no_overlap(): ) -def test_drop_timeslice(timeslice_dataarray): - from muse.timeslices import drop_timeslice +def test_drop_timeslice(non_timesliced_dataarray, timeslice): + from muse.timeslices import broadcast_timeslice, drop_timeslice - dropped = drop_timeslice(timeslice_dataarray) - coords_to_check = {"timeslice", "semester", "week", "day"} - assert coords_to_check.issubset(timeslice_dataarray.coords) + # Test on array with timeslice data + timesliced_dataarray = broadcast_timeslice(non_timesliced_dataarray, ts=timeslice) + dropped = drop_timeslice(timesliced_dataarray) + coords_to_check = {"timeslice", "month", "day", "hour"} + assert coords_to_check.issubset(timesliced_dataarray.coords) assert not coords_to_check.intersection(dropped.coords) # Test on arrays without timeslice data - data_without_timeslice = DataArray([1, 2, 3], dims=["x"]) - assert drop_timeslice(data_without_timeslice).equals(data_without_timeslice) + assert drop_timeslice(non_timesliced_dataarray).equals(non_timesliced_dataarray) assert drop_timeslice(dropped).equals(dropped) -@fixture -def non_timesliced_dataarray(): - return DataArray([1, 2, 3], dims=["x"]) - - def test_broadcast_timeslice(non_timesliced_dataarray, timeslice): from muse.timeslices import broadcast_timeslice, compress_timeslice + # Broadcast array to different levels of granularity for level in ["month", "day", "hour"]: - out = broadcast_timeslice(non_timesliced_dataarray, level=level) - target_timeslices = compress_timeslice(timeslice, level=level) + out = broadcast_timeslice(non_timesliced_dataarray, ts=timeslice, level=level) + target_timeslices = compress_timeslice( + timeslice, ts=timeslice, level=level, operation="sum" + ) # Check that timeslicing in output matches the global scheme assert out.timeslice.equals(target_timeslices.timeslice) @@ -141,12 +103,14 @@ def 
test_broadcast_timeslice(non_timesliced_dataarray, timeslice): ) # Calling on a fully timesliced array: the input should be returned unchanged - out2 = broadcast_timeslice(out) + out2 = broadcast_timeslice(out, ts=timeslice) assert out2.equals(out) - # Calling on an array with inappropraite timeslicing: ValueError should be raised + # Calling on an array with inappropriate timeslicing: ValueError should be raised with raises(ValueError): - broadcast_timeslice(compress_timeslice(out, level="day")) + broadcast_timeslice( + compress_timeslice(out, ts=timeslice, level="day"), ts=timeslice + ) def test_distribute_timeslice(non_timesliced_dataarray, timeslice): @@ -156,17 +120,22 @@ def test_distribute_timeslice(non_timesliced_dataarray, timeslice): distribute_timeslice, ) + # Distribute array to different levels of granularity for level in ["month", "day", "hour"]: - out = distribute_timeslice(non_timesliced_dataarray, level=level) - target_timeslices = compress_timeslice(timeslice, level=level) + out = distribute_timeslice(non_timesliced_dataarray, ts=timeslice, level=level) + target_timeslices = compress_timeslice( + timeslice, ts=timeslice, level=level, operation="sum" + ) # Check that timeslicing in output matches the global scheme assert out.timeslice.equals(target_timeslices.timeslice) # Check that all values are proportional to timeslice lengths - out_proportions = out / broadcast_timeslice(out.sum("timeslice"), level=level) + out_proportions = out / broadcast_timeslice( + out.sum("timeslice"), ts=timeslice, level=level + ) ts_proportions = target_timeslices / broadcast_timeslice( - target_timeslices.sum("timeslice"), level=level + target_timeslices.sum("timeslice"), ts=timeslice, level=level ) assert abs(out_proportions - ts_proportions).max() < 1e-6 @@ -174,30 +143,37 @@ def test_distribute_timeslice(non_timesliced_dataarray, timeslice): assert (out.sum("timeslice") == approx(non_timesliced_dataarray)).all() # Calling on a fully timesliced array: the input 
should be returned unchanged - out2 = distribute_timeslice(out) + out2 = distribute_timeslice(out, ts=timeslice) assert out2.equals(out) # Calling on an array with inappropraite timeslicing: ValueError should be raised with raises(ValueError): - distribute_timeslice(compress_timeslice(out, level="day")) + distribute_timeslice( + compress_timeslice(out, ts=timeslice, level="day"), ts=timeslice + ) def test_compress_timeslice(non_timesliced_dataarray, timeslice): from muse.timeslices import broadcast_timeslice, compress_timeslice, get_level # Create timesliced dataarray for compressing - timesliced_dataarray = broadcast_timeslice(non_timesliced_dataarray) + timesliced_dataarray = broadcast_timeslice(non_timesliced_dataarray, ts=timeslice) + # Compress array to different levels of granularity for level in ["month", "day", "hour"]: # Sum operation - out = compress_timeslice(timesliced_dataarray, operation="sum", level=level) + out = compress_timeslice( + timesliced_dataarray, ts=timeslice, operation="sum", level=level + ) assert get_level(out) == level assert ( out.sum("timeslice") == approx(timesliced_dataarray.sum("timeslice")) ).all() # Mean operation - out = compress_timeslice(timesliced_dataarray, operation="mean", level=level) + out = compress_timeslice( + timesliced_dataarray, ts=timeslice, operation="mean", level=level + ) assert get_level(out) == level assert ( out.mean("timeslice") == approx(timesliced_dataarray.mean("timeslice")) @@ -205,48 +181,108 @@ def test_compress_timeslice(non_timesliced_dataarray, timeslice): # because the data is equal in every timeslice # Calling without specifying a level: the input should be returned unchanged - out = compress_timeslice(timesliced_dataarray) + out = compress_timeslice(timesliced_dataarray, ts=timeslice) assert out.equals(timesliced_dataarray) # Calling with an invalid level: ValueError should be raised with raises(ValueError): - compress_timeslice(timesliced_dataarray, level="invalid") + 
compress_timeslice(timesliced_dataarray, ts=timeslice, level="invalid") # Calling with an invalid operation: ValueError should be raised with raises(ValueError): - compress_timeslice(timesliced_dataarray, level="day", operation="invalid") + compress_timeslice( + timesliced_dataarray, ts=timeslice, level="day", operation="invalid" + ) def test_expand_timeslice(non_timesliced_dataarray, timeslice): from muse.timeslices import broadcast_timeslice, expand_timeslice + # Different starting points for expansion for level in ["month", "day", "hour"]: - # Create timesliced dataarray for expanding timesliced_dataarray = broadcast_timeslice( - non_timesliced_dataarray, level=level + non_timesliced_dataarray, ts=timeslice, level=level ) # Broadcast operation - out = expand_timeslice(timesliced_dataarray, operation="broadcast") + out = expand_timeslice( + timesliced_dataarray, ts=timeslice, operation="broadcast" + ) assert out.timeslice.equals(timeslice.timeslice) assert ( out.mean("timeslice") == approx(timesliced_dataarray.mean("timeslice")) ).all() # Distribute operation - out = expand_timeslice(timesliced_dataarray, operation="distribute") + out = expand_timeslice( + timesliced_dataarray, ts=timeslice, operation="distribute" + ) assert out.timeslice.equals(timeslice.timeslice) assert ( out.sum("timeslice") == approx(timesliced_dataarray.sum("timeslice")) ).all() # Calling on an already expanded array: the input should be returned unchanged - out2 = expand_timeslice(out) + out2 = expand_timeslice(out, ts=timeslice) assert out.equals(out2) # Calling with an invalid operation: ValueError should be raised with raises(ValueError): timesliced_dataarray = broadcast_timeslice( - non_timesliced_dataarray, level="month" + non_timesliced_dataarray, ts=timeslice, level="month" + ) + expand_timeslice(timesliced_dataarray, ts=timeslice, operation="invalid") + + +def test_get_level(non_timesliced_dataarray, timeslice): + from muse.timeslices import broadcast_timeslice, get_level + + for 
level in ["month", "day", "hour"]: + timesliced_dataarray = broadcast_timeslice( + non_timesliced_dataarray, ts=timeslice, level=level ) - expand_timeslice(timesliced_dataarray, operation="invalid") + assert get_level(timesliced_dataarray) == level + + # Should raise error with non-timesliced array + with raises(ValueError): + get_level(non_timesliced_dataarray) + + +def test_sort_timeslices(non_timesliced_dataarray, timeslice): + from muse.timeslices import broadcast_timeslice, sort_timeslices + + # Finest timeslice level -> should match ordering of `timeslice` + timesliced_dataarray = broadcast_timeslice( + non_timesliced_dataarray, ts=timeslice, level="hour" + ) + sorted = sort_timeslices(timesliced_dataarray, timeslice) + assert sorted.timeslice.equals(timeslice.timeslice) + assert not sorted.timeslice.equals( + timesliced_dataarray.sortby("timeslice").timeslice + ) # but could be true if the timeslices in `timeslice` are in alphabetical order + + # Coarser timeslice level -> should match xarray sortby + timesliced_dataarray = broadcast_timeslice( + non_timesliced_dataarray, ts=timeslice, level="month" + ) + sorted = sort_timeslices(timesliced_dataarray, timeslice) + assert sorted.timeslice.equals(timesliced_dataarray.sortby("timeslice").timeslice) + + +def test_timeslice_max(non_timesliced_dataarray): + from muse.timeslices import broadcast_timeslice, read_timeslices, timeslice_max + + # With two equal timeslice lengths, this should be equivalent to max * 2 + ts = read_timeslices( + """ + [timeslices] + winter.weekday.night = 396 + winter.weekday.morning = 396 + """ + ) + timesliced_dataarray = broadcast_timeslice(non_timesliced_dataarray, ts=ts) + timesliced_dataarray = timesliced_dataarray + np.random.rand( + *timesliced_dataarray.shape + ) + timeslice_max_dataarray = timeslice_max(timesliced_dataarray, ts=ts) + assert timeslice_max_dataarray.equals(timesliced_dataarray.max("timeslice") * 2) From 88e0540f65931febaef13944b006460ed3be20c6 Mon Sep 17 00:00:00 
2001 From: Tom Bland Date: Mon, 18 Nov 2024 14:13:52 +0000 Subject: [PATCH 85/92] Move import statements --- src/muse/constraints.py | 5 +---- src/muse/costs.py | 2 -- src/muse/mca.py | 6 +----- src/muse/objectives.py | 9 +-------- src/muse/quantities.py | 11 ++--------- src/muse/sectors/sector.py | 5 +---- 6 files changed, 6 insertions(+), 32 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index daa49f349..59f5a08fa 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -115,7 +115,7 @@ def constraints( from mypy_extensions import KwArg from muse.registration import registrator -from muse.timeslices import drop_timeslice +from muse.timeslices import broadcast_timeslice, distribute_timeslice, drop_timeslice CAPACITY_DIMS = "asset", "replacement", "region" """Default dimensions for capacity decision variables.""" @@ -460,7 +460,6 @@ def max_production( from xarray import ones_like, zeros_like from muse.commodities import is_enduse - from muse.timeslices import broadcast_timeslice, distribute_timeslice if year is None: year = int(market.year.min()) @@ -738,7 +737,6 @@ def minimum_service( from xarray import ones_like, zeros_like from muse.commodities import is_enduse - from muse.timeslices import broadcast_timeslice, distribute_timeslice if "minimum_service_factor" not in technologies.data_vars: return None @@ -826,7 +824,6 @@ def lp_costs( from xarray import zeros_like from muse.commodities import is_enduse - from muse.timeslices import broadcast_timeslice, distribute_timeslice assert "year" not in technologies.dims diff --git a/src/muse/costs.py b/src/muse/costs.py index cd6084003..4461113eb 100644 --- a/src/muse/costs.py +++ b/src/muse/costs.py @@ -244,8 +244,6 @@ def lifetime_levelized_cost_of_energy( Return: xr.DataArray with the LCOE calculated for the relevant technologies """ - from muse.timeslices import broadcast_timeslice, distribute_timeslice - techs = technologies[ [ "technical_life", diff --git 
a/src/muse/mca.py b/src/muse/mca.py index 37e3c34cf..41a4000ea 100644 --- a/src/muse/mca.py +++ b/src/muse/mca.py @@ -14,7 +14,7 @@ from muse.outputs.cache import OutputCache from muse.readers import read_initial_market from muse.sectors import SECTORS_REGISTERED, AbstractSector, Sector -from muse.timeslices import drop_timeslice +from muse.timeslices import broadcast_timeslice, drop_timeslice from muse.utilities import future_propagation @@ -41,7 +41,6 @@ def factory(cls, settings: str | Path | Mapping | Any) -> MCA: from muse.outputs.mca import factory as ofactory from muse.readers import read_settings from muse.readers.toml import convert - from muse.timeslices import drop_timeslice if isinstance(settings, (str, Path)): settings = read_settings(settings) # type: ignore @@ -276,8 +275,6 @@ def run(self) -> None: from xarray import DataArray - from muse.timeslices import broadcast_timeslice - nyear = len(self.time_framework) - 1 check_carbon_budget = len(self.carbon_budget) and len(self.carbon_commodities) shoots = self.control_undershoot or self.control_overshoot @@ -362,7 +359,6 @@ def single_year_iteration( from copy import deepcopy from muse.commodities import is_enduse - from muse.timeslices import drop_timeslice sectors = deepcopy(sectors) market = market.copy(deep=True) diff --git a/src/muse/objectives.py b/src/muse/objectives.py index cd9f7a137..ed865d8e7 100644 --- a/src/muse/objectives.py +++ b/src/muse/objectives.py @@ -71,7 +71,7 @@ def comfort( from muse.outputs.cache import cache_quantity from muse.registration import registrator -from muse.timeslices import drop_timeslice +from muse.timeslices import broadcast_timeslice, distribute_timeslice, drop_timeslice from muse.utilities import filter_input OBJECTIVE_SIGNATURE = Callable[ @@ -134,8 +134,6 @@ def objectives( *args, **kwargs, ) -> xr.Dataset: - from muse.timeslices import broadcast_timeslice - result = xr.Dataset() for name, objective in functions: obj = objective( @@ -329,7 +327,6 @@ def 
emission_cost( with :math:`s` the timeslices and :math:`c` the commodity. """ from muse.commodities import is_enduse, is_pollutant - from muse.timeslices import distribute_timeslice enduses = is_enduse(technologies.comm_usage.sel(commodity=demand.commodity)) total = demand.sel(commodity=enduses).sum("commodity") @@ -400,7 +397,6 @@ def lifetime_levelized_cost_of_energy( """ from muse.costs import lifetime_levelized_cost_of_energy as LCOE from muse.quantities import capacity_to_service_demand - from muse.timeslices import broadcast_timeslice, distribute_timeslice capacity = capacity_to_service_demand( technologies=technologies, demand=demand, timeslice_level=timeslice_level @@ -438,7 +434,6 @@ def net_present_value( """ from muse.costs import net_present_value as NPV from muse.quantities import capacity_to_service_demand - from muse.timeslices import broadcast_timeslice, distribute_timeslice capacity = capacity_to_service_demand(technologies=technologies, demand=demand) production = ( @@ -473,7 +468,6 @@ def net_present_cost( """ from muse.costs import net_present_cost as NPC from muse.quantities import capacity_to_service_demand - from muse.timeslices import broadcast_timeslice, distribute_timeslice capacity = capacity_to_service_demand(technologies=technologies, demand=demand) production = ( @@ -507,7 +501,6 @@ def equivalent_annual_cost( """ from muse.costs import equivalent_annual_cost as EAC from muse.quantities import capacity_to_service_demand - from muse.timeslices import broadcast_timeslice, distribute_timeslice capacity = capacity_to_service_demand(technologies=technologies, demand=demand) production = ( diff --git a/src/muse/quantities.py b/src/muse/quantities.py index b4e5c8a9a..27ccfba3c 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -13,6 +13,8 @@ import numpy as np import xarray as xr +from muse.timeslices import broadcast_timeslice, distribute_timeslice + def supply( capacity: xr.DataArray, @@ -42,7 +44,6 @@ def supply( input 
commodities). """ from muse.commodities import CommodityUsage, check_usage, is_pollutant - from muse.timeslices import broadcast_timeslice maxprod = maximum_production( technologies, capacity, timeslice_level=timeslice_level @@ -138,7 +139,6 @@ def emission( A data array containing emissions (and only emissions). """ from muse.commodities import is_enduse, is_pollutant - from muse.timeslices import broadcast_timeslice from muse.utilities import broadcast_techs # just in case we are passed a technologies dataset, like in other functions @@ -170,7 +170,6 @@ def gross_margin( - non-environmental commodities OUTPUTS are related to revenues. """ from muse.commodities import is_enduse, is_pollutant - from muse.timeslices import distribute_timeslice from muse.utilities import broadcast_techs tech = broadcast_techs( # type: ignore @@ -298,7 +297,6 @@ def consumption( are not given, then flexible consumption is *not* considered. """ from muse.commodities import is_enduse, is_fuel - from muse.timeslices import broadcast_timeslice from muse.utilities import filter_with_template params = filter_with_template( @@ -381,7 +379,6 @@ def maximum_production( filters and the set of technologies in `capacity`. """ from muse.commodities import is_enduse - from muse.timeslices import broadcast_timeslice, distribute_timeslice from muse.utilities import broadcast_techs, filter_input capa = filter_input( @@ -429,7 +426,6 @@ def capacity_in_use( Capacity-in-use for each technology, whittled down by the filters. """ from muse.commodities import is_enduse - from muse.timeslices import broadcast_timeslice from muse.utilities import broadcast_techs, filter_input prod = filter_input( @@ -497,7 +493,6 @@ def minimum_production( the filters and the set of technologies in `capacity`. 
""" from muse.commodities import is_enduse - from muse.timeslices import broadcast_timeslice, distribute_timeslice from muse.utilities import broadcast_techs, filter_input capa = filter_input( @@ -531,8 +526,6 @@ def capacity_to_service_demand( timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Minimum capacity required to fulfill the demand.""" - from muse.timeslices import broadcast_timeslice, distribute_timeslice - timeslice_outputs = distribute_timeslice( technologies.fixed_outputs.sel(commodity=demand.commodity), level=timeslice_level, diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index 6b6ff6dc7..89e64123f 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -15,6 +15,7 @@ from muse.sectors.abstract import AbstractSector from muse.sectors.register import register_sector from muse.sectors.subsector import Subsector +from muse.timeslices import compress_timeslice, expand_timeslice @register_sector(name="default") @@ -313,8 +314,6 @@ def market_variables(self, market: xr.Dataset, technologies: xr.Dataset) -> Any: def convert_to_sector_timeslicing(self, market: xr.Dataset) -> xr.Dataset: """Converts market data to sector timeslicing.""" - from muse.timeslices import compress_timeslice - supply = compress_timeslice( market["supply"], level=self.timeslice_level, operation="sum" ) @@ -328,8 +327,6 @@ def convert_to_sector_timeslicing(self, market: xr.Dataset) -> xr.Dataset: def convert_to_global_timeslicing(self, market: xr.Dataset) -> xr.Dataset: """Converts market data to global timeslicing.""" - from muse.timeslices import expand_timeslice - supply = expand_timeslice(market["supply"], operation="distribute") consumption = expand_timeslice(market["consumption"], operation="distribute") costs = expand_timeslice(market["costs"], operation="distribute") From 959148dc9a70748cfd5db81549078e0130929aa1 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 18 Nov 2024 14:25:21 +0000 Subject: [PATCH 86/92] Add check to 
sector init --- src/muse/sectors/sector.py | 44 ++++++++++++++++++++++++++------------ 1 file changed, 30 insertions(+), 14 deletions(-) diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index 89e64123f..463c6f99e 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -15,7 +15,7 @@ from muse.sectors.abstract import AbstractSector from muse.sectors.register import register_sector from muse.sectors.subsector import Subsector -from muse.timeslices import compress_timeslice, expand_timeslice +from muse.timeslices import compress_timeslice, expand_timeslice, get_level @register_sector(name="default") @@ -112,21 +112,31 @@ def __init__( from muse.outputs.sector import factory as ofactory from muse.production import maximum_production + """Name of the sector.""" self.name: str = name + + """Timeslice level for the sector (e.g. "month").""" self.timeslice_level = timeslice_level - """Name of the sector.""" - self.subsectors: Sequence[Subsector] = list(subsectors) + """Subsectors controlled by this object.""" - self.technologies: xr.Dataset = technologies + self.subsectors: Sequence[Subsector] = list(subsectors) + """Parameters describing the sector's technologies.""" + self.technologies: xr.Dataset = technologies + if "timeslice" in self.technologies.dims: + if not get_level(self.technologies) == self.timeslice_level: + raise ValueError( + f"Technodata for {self.name} sector does not match " + "the specified timeslice level for that sector " + f"({self.timeslice_level})" + ) + + """Interpolation method and arguments when computing years.""" self.interpolation: Mapping[str, Any] = { "method": interpolation, "kwargs": {"fill_value": "extrapolate"}, } - """Interpolation method and arguments when computing years.""" - if interactions is None: - interactions = interaction_factory() - self.interactions = interactions + """Interactions between agents. 
Called right before computing new investments, this function should manage any @@ -143,20 +153,26 @@ def __init__( :py:mod:`muse.interactions` contains MUSE's base interactions """ + if interactions is None: + interactions = interaction_factory() + self.interactions = interactions + + """A function for outputting data for post-mortem analysis.""" self.outputs: Callable = ( cast(Callable, ofactory()) if outputs is None else outputs ) - """A function for outputting data for post-mortem analysis.""" - self.supply_prod = ( - supply_prod if supply_prod is not None else maximum_production - ) - """ Computes production as used to return the supply to the MCA. + + """Computes production as used to return the supply to the MCA. It can be anything registered with :py:func:`@register_production`. """ - self.output_data: xr.Dataset + self.supply_prod = ( + supply_prod if supply_prod is not None else maximum_production + ) + """Full supply, consumption and costs data for the most recent year.""" + self.output_data: xr.Dataset @property def forecast(self): From 4a81d941f1d280bed3614fb4db9c8374565995a7 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 18 Nov 2024 14:54:35 +0000 Subject: [PATCH 87/92] Variable names, docstrings --- src/muse/mca.py | 1 - src/muse/sectors/sector.py | 2 - src/muse/timeslices.py | 86 ++++++++++++++++++++++---------------- 3 files changed, 51 insertions(+), 38 deletions(-) diff --git a/src/muse/mca.py b/src/muse/mca.py index 41a4000ea..28e955358 100644 --- a/src/muse/mca.py +++ b/src/muse/mca.py @@ -370,7 +370,6 @@ def single_year_iteration( sector_market = sector.next( market[["supply", "consumption", "prices"]] # type:ignore ) - # TODO: check sector_market is in global timeslicing scheme sector_market = sector_market.sel(year=market.year) # Calculate net consumption diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index 463c6f99e..a4d78a114 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -210,7 +210,6 
@@ def group_assets(x: xr.DataArray) -> xr.DataArray: self.interactions(list(self.agents)) # Convert market to sector timeslicing - # TODO: check mca_market is in global timeslicing scheme mca_market = self.convert_to_sector_timeslicing(mca_market) # Select appropriate data from the market @@ -346,7 +345,6 @@ def convert_to_global_timeslicing(self, market: xr.Dataset) -> xr.Dataset: supply = expand_timeslice(market["supply"], operation="distribute") consumption = expand_timeslice(market["consumption"], operation="distribute") costs = expand_timeslice(market["costs"], operation="distribute") - return xr.Dataset(dict(supply=supply, consumption=consumption, costs=costs)) @property diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 06f94067a..a89dec86b 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -21,7 +21,7 @@ def read_timeslices( settings: Union[Mapping, str], level_names: Sequence[str] = ("month", "day", "hour"), -) -> pd.DataFrame: +) -> DataArray: from functools import reduce from toml import loads @@ -67,14 +67,14 @@ def setup_module(settings: Union[str, Mapping]): def broadcast_timeslice( - x: DataArray, ts: Optional[DataArray] = None, level: Optional[str] = None + data: DataArray, ts: Optional[DataArray] = None, level: Optional[str] = None ) -> DataArray: """Convert a non-timesliced array to a timesliced array by broadcasting. If x is already timesliced in the appropriate scheme, it will be returned unchanged. Args: - x: Array to broadcast. + data: Array to broadcast. ts: Dataarray with timeslice lengths. If None, defaults to the global timeslice. level: Level to broadcast to. If None, use the finest level of ts. @@ -88,25 +88,27 @@ def broadcast_timeslice( ts = compress_timeslice(ts, ts=ts, level=level, operation="sum") # If x already has timeslices, check that it matches the reference timeslice. 
- if "timeslice" in x.dims: - if x.timeslice.reset_coords(drop=True).equals(ts.timeslice): - return x + if "timeslice" in data.dims: + if data.timeslice.reset_coords(drop=True).equals(ts.timeslice): + return data raise ValueError("x has incompatible timeslicing.") mindex_coords = Coordinates.from_pandas_multiindex(ts.timeslice, "timeslice") - broadcasted = x.expand_dims(timeslice=ts["timeslice"]).assign_coords(mindex_coords) + broadcasted = data.expand_dims(timeslice=ts["timeslice"]).assign_coords( + mindex_coords + ) return broadcasted def distribute_timeslice( - x: DataArray, ts: Optional[DataArray] = None, level=None + data: DataArray, ts: Optional[DataArray] = None, level=None ) -> DataArray: """Convert a non-timesliced array to a timesliced array by distribution. If x is already timesliced in the appropriate scheme, it will be returned unchanged. Args: - x: Array to distribute. + data: Array to distribute. ts: Dataarray with timeslice lengths. If None, defaults to the global timeslice. level: Level to distribute to. If None, use the finest level of ts. @@ -118,18 +120,18 @@ def distribute_timeslice( ts = compress_timeslice(ts, ts=ts, level=level, operation="sum") # If x already has timeslices, check that it matches the reference timeslice. 
- if "timeslice" in x.dims: - if x.timeslice.reset_coords(drop=True).equals(ts.timeslice): - return x + if "timeslice" in data.dims: + if data.timeslice.reset_coords(drop=True).equals(ts.timeslice): + return data raise ValueError("x has incompatible timeslicing.") - broadcasted = broadcast_timeslice(x, ts=ts) + broadcasted = broadcast_timeslice(data, ts=ts) timeslice_fractions = ts / broadcast_timeslice(ts.sum(), ts=ts) return broadcasted * timeslice_fractions def compress_timeslice( - x: DataArray, + data: DataArray, ts: Optional[DataArray] = None, level: Optional[str] = None, operation: str = "sum", @@ -138,10 +140,11 @@ def compress_timeslice( The operation can be either 'sum', or 'mean': - sum: sum values at each compressed timeslice level - - mean: take a weighted average of values at each compressed timeslice level + - mean: take a weighted average of values at each compressed timeslice level, + according to timeslice length Args: - x: Timesliced array to compress. Must have the same timeslicing as ts. + data: Timesliced array to compress. Must have the same timeslicing as ts. ts: Dataarray with timeslice lengths. If None, defaults to the global timeslice. level: Level to compress to. If None, don't compress. operation: Operation to perform ("sum" or "mean"). Defaults to "sum". 
@@ -151,23 +154,23 @@ def compress_timeslice( ts = TIMESLICE # Raise error if x is not timesliced appropriately - if "timeslice" not in x.dims: + if "timeslice" not in data.dims: raise ValueError("x must have a 'timeslice' dimension.") - if not x.timeslice.reset_coords(drop=True).equals(ts.timeslice): + if not data.timeslice.reset_coords(drop=True).equals(ts.timeslice): raise ValueError("x has incompatible timeslicing.") # If level is not specified, don't compress if level is None: - return x + return data # level must be a valid timeslice level - x_levels = x.timeslice.to_index().names + x_levels = data.timeslice.to_index().names if level not in x_levels: raise ValueError(f"Unknown level: {level}. Must be one of {x_levels}.") # Return x unchanged if already at the desired level - if get_level(x) == level: - return x + if get_level(data) == level: + return data # Prepare mask idx = x_levels.index(level) @@ -182,7 +185,7 @@ def compress_timeslice( # Perform the operation result = ( - (x.unstack(dim="timeslice") * mask) + (data.unstack(dim="timeslice") * mask) .sum(compressed_levels) .stack(timeslice=kept_levels) ) @@ -190,7 +193,7 @@ def compress_timeslice( def expand_timeslice( - x: DataArray, ts: Optional[DataArray] = None, operation: str = "distribute" + data: DataArray, ts: Optional[DataArray] = None, operation: str = "distribute" ) -> DataArray: """Convert a timesliced array to a finer level. @@ -201,7 +204,7 @@ def expand_timeslice( - broadcast: broadcast values across over the new timeslice level(s) Args: - x: Timesliced array to expand. + data: Timesliced array to expand. ts: Dataarray with timeslice lengths. If None, defaults to the global timeslice. operation: Operation to perform ("distribute" or "broadcast"). Defaults to "distribute". 
@@ -211,12 +214,12 @@ def expand_timeslice( ts = TIMESLICE # Raise error if x is not timesliced - if "timeslice" not in x.dims: + if "timeslice" not in data.dims: raise ValueError("DataArray must have a 'timeslice' dimension.") # Get level names ts_levels = ts.timeslice.to_index().names - x_levels = x.timeslice.to_index().names + x_levels = data.timeslice.to_index().names # Raise error if x_level is not a subset of ts_levels if not set(x_levels).issubset(ts_levels): @@ -226,9 +229,9 @@ def expand_timeslice( # Return x unchanged if already at the desired level finest_level = get_level(ts) - current_level = get_level(x) + current_level = get_level(data) if current_level == finest_level: - return x + return data # Prepare mask mask = ts.unstack(dim="timeslice") @@ -243,7 +246,7 @@ def expand_timeslice( # Perform the operation result = ( - (x.unstack(dim="timeslice") * mask) + (data.unstack(dim="timeslice") * mask) .stack(timeslice=ts_levels) .dropna("timeslice") ) @@ -269,28 +272,41 @@ def get_level(data: DataArray) -> str: def sort_timeslices(data: DataArray, ts: Optional[DataArray] = None) -> DataArray: - """Sorts the timeslices of a DataArray according to a reference timeslice.""" + """Sorts the timeslices of a DataArray according to a reference timeslice. + + This will only sort timeslices to match the reference if the data is at the same + timeslice level as the reference. Otherwise, it will sort timeslices in alphabetical + order. + + Args: + data: Timesliced DataArray to sort. 
+ ts: Dataarray with reference timeslices in the appropriate order + """ if ts is None: ts = TIMESLICE - # If data is at the finest timeslice level, sort timeslices according to ts + # If data is at the same timeslice level as ts, sort timeslices according to ts if get_level(data) == get_level(ts): return data.sel(timeslice=ts.timeslice) # Otherwise, sort timeslices in alphabetical order return data.sortby("timeslice") -def timeslice_max(x: DataArray, ts: Optional[DataArray] = None) -> DataArray: +def timeslice_max(data: DataArray, ts: Optional[DataArray] = None) -> DataArray: """Find the max value over the timeslice dimension, normalized for timeslice length. This first annualizes the value in each timeslice by dividing by the fraction of the year that the timeslice occupies, then takes the maximum value + + Args: + data: Timesliced DataArray to find the max of. + ts: Dataarray with timeslice lengths. If None, defaults to the global timeslice. """ if ts is None: ts = TIMESLICE - timeslice_level = get_level(x) + timeslice_level = get_level(data) timeslice_fractions = compress_timeslice( ts, ts=ts, level=timeslice_level, operation="sum" ) / broadcast_timeslice(ts.sum(), ts=ts, level=timeslice_level) - return (x / timeslice_fractions).max("timeslice") + return (data / timeslice_fractions).max("timeslice") From dff094f1579202aa85998cbc4cb6ce2de412c769 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 18 Nov 2024 15:06:54 +0000 Subject: [PATCH 88/92] Apply default timeslice level to sector --- src/muse/sectors/sector.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index a4d78a114..bef4e189e 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -111,12 +111,13 @@ def __init__( from muse.interactions import factory as interaction_factory from muse.outputs.sector import factory as ofactory from muse.production import maximum_production + from muse.timeslices import 
TIMESLICE """Name of the sector.""" self.name: str = name """Timeslice level for the sector (e.g. "month").""" - self.timeslice_level = timeslice_level + self.timeslice_level = timeslice_level or get_level(TIMESLICE) """Subsectors controlled by this object.""" self.subsectors: Sequence[Subsector] = list(subsectors) From 82f58618c18d2c08ac9bd537c87eba836f44ae0c Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 18 Nov 2024 19:13:05 +0000 Subject: [PATCH 89/92] Fix adhoc bug, add warning --- src/muse/investments.py | 6 +++++- src/muse/timeslices.py | 10 ++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/src/muse/investments.py b/src/muse/investments.py index 39f565cd5..8af7fcfa1 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -256,7 +256,11 @@ def adhoc_match_demand( ).where(search_space, 0) capacity = capacity_in_use( - production, technologies, year=year, technology=production.replacement + production, + technologies, + year=year, + technology=production.replacement, + timeslice_level=timeslice_level, ).drop_vars("technology") if "timeslice" in capacity.dims: capacity = timeslice_max(capacity) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index a89dec86b..067a202c3 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -23,6 +23,7 @@ def read_timeslices( level_names: Sequence[str] = ("month", "day", "hour"), ) -> DataArray: from functools import reduce + from logging import getLogger from toml import loads @@ -31,6 +32,15 @@ def read_timeslices( settings = loads(settings) settings = dict(**settings.get("timeslices", settings)) + # Legacy: warn user about deprecation of "aggregates" feature (#550) + if "aggregates" in settings: + msg = ( + "Timeslice aggregation has been deprecated since v1.3.0. Please see the " + "release notes for that version for more information." 
+ ) + getLogger(__name__).warning(msg) + settings.pop("aggregates") + # Extract level names if "level_names" in settings: level_names = settings.pop("level_names") From 8ca30baa333db6fa3a319fdf4318ca74e1b7d207 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Mon, 18 Nov 2024 19:41:42 +0000 Subject: [PATCH 90/92] Fix error in convert_to_global_timeslicing --- src/muse/sectors/sector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index bef4e189e..fe812cd7a 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -345,7 +345,7 @@ def convert_to_global_timeslicing(self, market: xr.Dataset) -> xr.Dataset: """Converts market data to global timeslicing.""" supply = expand_timeslice(market["supply"], operation="distribute") consumption = expand_timeslice(market["consumption"], operation="distribute") - costs = expand_timeslice(market["costs"], operation="distribute") + costs = expand_timeslice(market["costs"], operation="broadcast") return xr.Dataset(dict(supply=supply, consumption=consumption, costs=costs)) @property From 07074cddfa7e11ab57d134088197786bb4a9913b Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Thu, 21 Nov 2024 10:16:11 +0000 Subject: [PATCH 91/92] Use modern syntax for type annotations --- src/muse/constraints.py | 14 +++++++------- src/muse/sectors/sector.py | 3 +-- src/muse/sectors/subsector.py | 5 ++--- 3 files changed, 10 insertions(+), 12 deletions(-) diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 785b715cc..559de8eb9 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -72,7 +72,7 @@ def constraints( search_space: xr.DataArray, market: xr.Dataset, technologies: xr.Dataset, - year: Optional[int] = None, + year: int | None = None, **kwargs, ) -> Constraint: pass @@ -248,7 +248,7 @@ def constraints( market: xr.Dataset, technologies: xr.Dataset, year: int | None = None, - timeslice_level: Optional[str] = None, + 
timeslice_level: str | None = None, ) -> list[Constraint]: if year is None: year = int(market.year.min()) @@ -454,7 +454,7 @@ def max_production( market: xr.Dataset, technologies: xr.Dataset, year: int | None = None, - timeslice_level: Optional[str] = None, + timeslice_level: str | None = None, **kwargs, ) -> Constraint: """Constructs constraint between capacity and maximum production. @@ -519,7 +519,7 @@ def demand_limiting_capacity( market: xr.Dataset, technologies: xr.Dataset, year: int | None = None, - timeslice_level: Optional[str] = None, + timeslice_level: str | None = None, **kwargs, ) -> Constraint: """Limits the maximum combined capacity to match the demand. @@ -735,7 +735,7 @@ def minimum_service( market: xr.Dataset, technologies: xr.Dataset, year: int | None = None, - timeslice_level: Optional[str] = None, + timeslice_level: str | None = None, **kwargs, ) -> Constraint | None: """Constructs constraint between capacity and minimum service.""" @@ -778,7 +778,7 @@ def minimum_service( def lp_costs( - technologies: xr.Dataset, costs: xr.DataArray, timeslice_level: Optional[str] = None + technologies: xr.Dataset, costs: xr.DataArray, timeslice_level: str | None = None ) -> xr.Dataset: """Creates costs for solving with scipy's LP solver. 
@@ -1159,7 +1159,7 @@ def factory( technologies: xr.Dataset, costs: xr.DataArray, *constraints: Constraint, - timeslice_level: Optional[str] = None, + timeslice_level: str | None = None, ) -> ScipyAdapter: lpcosts = lp_costs(technologies, costs, timeslice_level=timeslice_level) diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index fe812cd7a..b2a999ad4 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -4,7 +4,6 @@ from typing import ( Any, Callable, - Optional, cast, ) @@ -106,7 +105,7 @@ def __init__( interpolation: str = "linear", outputs: Callable | None = None, supply_prod: PRODUCTION_SIGNATURE | None = None, - timeslice_level: Optional[str] = None, + timeslice_level: str | None = None, ): from muse.interactions import factory as interaction_factory from muse.outputs.sector import factory as ofactory diff --git a/src/muse/sectors/subsector.py b/src/muse/sectors/subsector.py index 784397124..381a49b2f 100644 --- a/src/muse/sectors/subsector.py +++ b/src/muse/sectors/subsector.py @@ -4,7 +4,6 @@ from typing import ( Any, Callable, - Optional, ) import numpy as np @@ -27,7 +26,7 @@ def __init__( name: str = "subsector", forecast: int = 5, expand_market_prices: bool = False, - timeslice_level: Optional[str] = None, + timeslice_level: str | None = None, ): from muse import constraints as cs from muse import demand_share as ds @@ -126,7 +125,7 @@ def factory( regions: Sequence[str] | None = None, current_year: int | None = None, name: str = "subsector", - timeslice_level: Optional[str] = None, + timeslice_level: str | None = None, ) -> Subsector: from muse import constraints as cs from muse import demand_share as ds From 220fb2795f7c45988cdae3c9019db28217369953 Mon Sep 17 00:00:00 2001 From: Tom Bland Date: Tue, 26 Nov 2024 13:59:06 +0000 Subject: [PATCH 92/92] Address reviewer comments --- src/muse/agents/agent.py | 1 + src/muse/sectors/sector.py | 8 ++----- src/muse/timeslices.py | 45 
++++++++++++++++++++++++-------------- 3 files changed, 32 insertions(+), 22 deletions(-) diff --git a/src/muse/agents/agent.py b/src/muse/agents/agent.py index 474212298..db0e8a539 100644 --- a/src/muse/agents/agent.py +++ b/src/muse/agents/agent.py @@ -62,6 +62,7 @@ def __init__( self.quantity = quantity """Attribute to classify different agents' share of the population.""" self.timeslice_level = timeslice_level + """Timeslice level for the agent.""" def filter_input( self, diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index b2a999ad4..26e721606 100644 --- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -153,9 +153,7 @@ def __init__( :py:mod:`muse.interactions` contains MUSE's base interactions """ - if interactions is None: - interactions = interaction_factory() - self.interactions = interactions + self.interactions = interactions or interaction_factory() """A function for outputting data for post-mortem analysis.""" self.outputs: Callable = ( @@ -167,9 +165,7 @@ def __init__( It can be anything registered with :py:func:`@register_production`. """ - self.supply_prod = ( - supply_prod if supply_prod is not None else maximum_production - ) + self.supply_prod = supply_prod or maximum_production """Full supply, consumption and costs data for the most recent year.""" self.output_data: xr.Dataset diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 067a202c3..12ca36c3d 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -6,6 +6,11 @@ "distribute_timeslice", "drop_timeslice", "setup_module", + "compress_timeslice", + "expand_timeslice", + "get_level", + "sort_timeslices", + "timeslice_max", ] from collections.abc import Mapping, Sequence @@ -81,7 +86,8 @@ def broadcast_timeslice( ) -> DataArray: """Convert a non-timesliced array to a timesliced array by broadcasting. - If x is already timesliced in the appropriate scheme, it will be returned unchanged. 
+ If data is already timesliced in the appropriate scheme, it will be returned + unchanged. Args: data: Array to broadcast. @@ -97,11 +103,13 @@ def broadcast_timeslice( if level is not None: ts = compress_timeslice(ts, ts=ts, level=level, operation="sum") - # If x already has timeslices, check that it matches the reference timeslice. + # If data already has timeslices, check that it matches the reference timeslice. if "timeslice" in data.dims: if data.timeslice.reset_coords(drop=True).equals(ts.timeslice): return data - raise ValueError("x has incompatible timeslicing.") + raise ValueError( + "Data is already timesliced, but does not match the reference." + ) mindex_coords = Coordinates.from_pandas_multiindex(ts.timeslice, "timeslice") broadcasted = data.expand_dims(timeslice=ts["timeslice"]).assign_coords( @@ -115,7 +123,8 @@ def distribute_timeslice( ) -> DataArray: """Convert a non-timesliced array to a timesliced array by distribution. - If x is already timesliced in the appropriate scheme, it will be returned unchanged. + If data is already timesliced in the appropriate scheme, it will be returned + unchanged. Args: data: Array to distribute. @@ -129,11 +138,13 @@ def distribute_timeslice( if level is not None: ts = compress_timeslice(ts, ts=ts, level=level, operation="sum") - # If x already has timeslices, check that it matches the reference timeslice. + # If data already has timeslices, check that it matches the reference timeslice. if "timeslice" in data.dims: if data.timeslice.reset_coords(drop=True).equals(ts.timeslice): return data - raise ValueError("x has incompatible timeslicing.") + raise ValueError( + "Data is already timesliced, but does not match the reference." 
+ ) broadcasted = broadcast_timeslice(data, ts=ts) timeslice_fractions = ts / broadcast_timeslice(ts.sum(), ts=ts) @@ -163,11 +174,11 @@ def compress_timeslice( if ts is None: ts = TIMESLICE - # Raise error if x is not timesliced appropriately + # Raise error if data is not timesliced appropriately if "timeslice" not in data.dims: - raise ValueError("x must have a 'timeslice' dimension.") + raise ValueError("Data must have a 'timeslice' dimension.") if not data.timeslice.reset_coords(drop=True).equals(ts.timeslice): - raise ValueError("x has incompatible timeslicing.") + raise ValueError("Data has incompatible timeslicing with reference.") # If level is not specified, don't compress if level is None: @@ -178,7 +189,7 @@ def compress_timeslice( if level not in x_levels: raise ValueError(f"Unknown level: {level}. Must be one of {x_levels}.") - # Return x unchanged if already at the desired level + # Return data unchanged if already at the desired level if get_level(data) == level: return data @@ -223,21 +234,23 @@ def expand_timeslice( if ts is None: ts = TIMESLICE - # Raise error if x is not timesliced + # Raise error if data is not timesliced if "timeslice" not in data.dims: - raise ValueError("DataArray must have a 'timeslice' dimension.") + raise ValueError("Data must have a 'timeslice' dimension.") # Get level names ts_levels = ts.timeslice.to_index().names x_levels = data.timeslice.to_index().names - # Raise error if x_level is not a subset of ts_levels + # Raise error if x_levels is not a subset of ts_levels if not set(x_levels).issubset(ts_levels): raise ValueError( - f"Timeslice levels of x ({x_levels}) must be a subset of ts ({ts_levels})." + "Data has incompatible timeslicing with reference. " + f"Timeslice levels of data ({x_levels}) must be a subset of ts " + f"({ts_levels})." 
) - # Return x unchanged if already at the desired level + # Return data unchanged if already at the desired level finest_level = get_level(ts) current_level = get_level(data) if current_level == finest_level: @@ -277,7 +290,7 @@ def drop_timeslice(data: DataArray) -> DataArray: def get_level(data: DataArray) -> str: """Get the timeslice level of a DataArray.""" if "timeslice" not in data.dims: - raise ValueError("DataArray does not have a 'timeslice' dimension.") + raise ValueError("Data does not have a 'timeslice' dimension.") return data.timeslice.to_index().names[-1]