diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 9b5beed23..1d8064b96 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.2.3 +current_version = 1.3.0 commit = True tag = True diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 35fb82e7d..97544f500 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,7 +16,7 @@ jobs: matrix: os: [ubuntu-latest, macos-latest, windows-latest] # Test with the earliest and the latest python versions supported - python-version: ["3.9", "3.12"] + python-version: ["3.9", "3.13"] steps: - uses: actions/checkout@v4 @@ -59,7 +59,7 @@ jobs: matrix: os: [ubuntu-latest, macos-latest, windows-latest] # Test with the earliest and the latest python versions supported - python-version: ["3.9", "3.12"] + python-version: ["3.9", "3.13"] steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index c4d14b92c..676e9a86c 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -13,7 +13,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.12 + python-version: 3.13 - name: Install dependencies run: | sudo apt update -y diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 457269720..000000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,91 +0,0 @@ -# Version - Development - -## Features - -- Formatting commodity names in technodata enduse ([#237](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/237)) -- Separate publish workflow and add upload of executable to release ([#231](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/231)) -- Update main branch ([#225](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/225)) -- Notebooks in the documentation are run as tests ([#178](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/178)) -- Tutorials in the documentation are run as 
tests ([#177](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/177)) -- Clean notebooks before running them as tests in the documentaition ([#173](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/173)) -- Add technology-granularity to sectoral outputs ([#214](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/214)) -- Add description to bisection method ([#149](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/149)) -- Add description to demo cases ([#139](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/139)) -- Run link-checker only once a week ([#148](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/148)) -- Update documentation for installing MUSE ([#138](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/138)) -- Updating pyproject.toml with valid python versions ([#121](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/121)) -- Expand CI workflow ([#119](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/119)) -- Convert setup.cfg to pyproject.toml and add entrypoint ([#118](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/118)) -- Add GitHub action to check for broken links ([#115](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/115)) -- Inconsistent trade case study ([#58](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/58)) -- Update README.txt in installation path ([#57](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/57)) -- Add tests to check the case of not using retrofit agents ([#53](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/53)) -- Add a tutorial for caching quantities and fix a bug in the caching pipeline ([#52](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/52)) -- Update documentation about not using retrofit ([51](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/51)) -- Add error about not finding interaction network ([50](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/50)) -- Add option 
for `standard_demand` and remove retro agents ([#35](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/35)) -- Update main version ([#28](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/28)) -- Update main version ([#26](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/26)) -- Adds version numbering ([#21](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/21)) -- Adds trade tutorial to the documentation ([#16](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/16)) -- Adds error messages ([#42](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/42)) -- Adds cache of quantities ([#15](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/15)) -- Adds trade tutorial to the documentation -- Updated branch names on pipelines ([#9](https://github.com/EnergySystemsModellingLab/MUSE_OS/issues/9)) -- Edited default package name from StarMUSE to MUSE ([#4](https://github.com/EnergySystemsModellingLab/MUSE_OS/issues/4)) -- Added new cases studies with multiple agents and spend limit ([#1](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/1)) -- Updates the model and the documentation to use the most recent version of MUSE - ([#964](https://github.com/EnergySystemsModellingLab/StarMuse/pull/964)) -- Updates the model and the documentation to use the most recent version of MUSE - ([#963](https://github.com/EnergySystemsModellingLab/StarMuse/pull/963)) -- Updates the documentation to use the most recent version of MUSE - ([#922](https://github.com/EnergySystemsModellingLab/StarMuse/pull/922)) -- Updates the documentation to provide information on adhoc and scipy solvers as well as correctly defines ObjSort direction for minimisation/maximisation ([#949](https://github.com/EnergySystemsModellingLab/StarMuse/pull/949)) -- Introduces a check on the type of the data defining the objectives (string for Objective, float/int for ObjData, and Boolean for Objsort) 
([#945](https://github.com/EnergySystemsModellingLab/StarMuse/issues/945])) -- Updates the documentation to use the most recent version of MUSE ([#922](https://github.com/EnergySystemsModellingLab/StarMuse/pull/922)) -- Improves the CI system, with a more thorough pre-commit hooks and QA - ([#917](https://github.com/EnergySystemsModellingLab/StarMuse/pull/917)) -- Introduces the CHANGELOG file and PR template - ([#916](https://github.com/EnergySystemsModellingLab/StarMuse/pull/916)) - -## Optimizations - -- None - -## Bug fixes - -- Update variable costs definition ([#247](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/247)) -- Solves missing forecast update in agents ([#212](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/212)) -- Solves missing file in tutorial 5 ([#182](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/182)) -- Solves missing subsectors in documentation ([#146](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/146)) -- Solves broken links in the documentation ([#137](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/137)) -- NPV fails to align timeslices ([#135]) -- OS case studies had obsolete output function removed ([#101]) -- Growth constraints with destination and source region ([#96](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/96)) -- Harmonise time dimension for asset interpolation before merging them ([#94](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/94)) -- Error in costs reporting ([#92](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/92)) -- Error in supply/consumption reporting ([#70](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/70)) -- Added destination region in output ([#64](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/64)) -- Consistency in packages use ([#38](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/56)) -- Raise error for inconsistent commodities 
([#38](https://github.com/EnergySystemsModellingLab/MUSE_OS/issues/38)) -- Fix error in black ([#32](https://github.com/EnergySystemsModellingLab/MUSE_OS/pull/32)) -- Fixes the dead links in the documentation now that the repository is open-sourced ([#3](https://github.com/EnergySystemsModellingLab/MUSE_OS/issues/3)) -- Ensures that the adhoc and scipy solvers require the same input in the agents file to minimise and maximise. Specifically, both solvers now require TRUE for minimisation and FALSE for maximisation ([#845](https://github.com/EnergySystemsModellingLab/StarMuse/issues/845)) -- Update the documentation to include a tutorial for implementing trade. -- Update the documentation on adding spend limit constraint description ([#941](https://github.com/EnergySystemsModellingLab/StarMuse/issues/941)) -- Fix typos in CHANGELOG file ([#939](https://github.com/EnergySystemsModellingLab/StarMuse/pull/939)) -- Specify error message for no commodity outputs ([#937](https://github.com/EnergySystemsModellingLab/StarMuse/issues/937)) -- Update the documentation on index redundancies ([#936](https://github.com/EnergySystemsModellingLab/StarMuse/issues/936)) -- Update the documentation on timeslice file removed ([#935](https://github.com/EnergySystemsModellingLab/StarMuse/issues/935)) -- Update the documentation on commodity types ([#934](https://github.com/EnergySystemsModellingLab/StarMuse/issues/934)) -- Update the documentation on regions file removed ([#933](https://github.com/EnergySystemsModellingLab/StarMuse/issues/933)) -- Update the documentation on commodity definition ([#932](https://github.com/EnergySystemsModellingLab/StarMuse/issues/932)) -- Update the documentation on agent share ([#931](https://github.com/EnergySystemsModellingLab/StarMuse/issues/931)) -- Update the documentation on equations displayed ([#930](https://github.com/EnergySystemsModellingLab/StarMuse/issues/930)) -- Update the documentation to correctly add an agent 
([#927](https://github.com/EnergySystemsModellingLab/StarMuse/issues/927)) -- Comfort objective modified to keep asset dimensions ([#926](https://github.com/EnergySystemsModellingLab/StarMuse/pull/926)) -- Update the documentation on growth rate ([#923](https://github.com/EnergySystemsModellingLab/StarMuse/issues/923)) - -## Breaking changes - -- Flake8 update on yaml ([#985](https://github.com/EnergySystemsModellingLab/StarMuse/pull/985)) diff --git a/CITATION.cff b/CITATION.cff index 33f15e1ef..7cb7f18fa 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -9,5 +9,5 @@ authors: given-names: Adam title: MUSE_OS -version: v1.2.3 -date-released: 2024-11-19 +version: v1.3.0 +date-released: 2024-12-03 diff --git a/docs/api.rst b/docs/api.rst index 1d1828be3..0d47458d6 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -55,11 +55,6 @@ PresetSector .. autoclass:: muse.sectors.preset_sector.PresetSector :members: -LegacySector -~~~~~~~~~~~~ - -.. autoclass:: muse.sectors.legacy_sector.LegacySector - :members: Production ~~~~~~~~~~ diff --git a/docs/application-flow.rst b/docs/application-flow.rst index 5774c6e49..f25c24645 100644 --- a/docs/application-flow.rst +++ b/docs/application-flow.rst @@ -503,11 +503,9 @@ The dispatch stage when running a sector can be described by the following graph After the investment stage is completed, then the new capacity of the sector is obtained by aggregating the assets of all agents of the sector. Then, the supply of commodities is calculated as requested by the ``dispatch_production`` argument defined for each sector in the ``settings.toml`` file. -The typical choice used in most examples in MUSE is ``share``, where the utilization across similar assets is the same in percentage. However, there are other options available, like - -- ``costed``: assets are ranked by their levelised costs and the cheaper ones are allowed to service the demand first up to their maximum production. Minimum service can be imposed if present. 
+There are two possible options for ``dispatch_production`` built into MUSE: +- ``share``: assets each supply a proportion of demand based on their share of total capacity. - ``maximum``: all the assets dispatch their maximum production, regardless of the demand. -- ``match``: supply matches the demand within the constrains on how much an asset can produce while minimizing the overall associated costs. ``match`` allows the choice between different metrics to rank assets, such as levelised costs and gross margin. See :py:mod:`muse.demand_matching` for the mathematical details. Once the supply is obtained, the consumed commodities required to achieve that production level are calculated. The cheapest fuel for flexible technologies is used. diff --git a/docs/conf.py b/docs/conf.py index 1cb20aece..de5fa793b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -11,7 +11,7 @@ project = "MUSE" copyright = "2024, Imperial College London" author = "Imperial College London" -release = "1.2.3" +release = "1.3.0" version = ".".join(release.split(".")[:2]) # -- General configuration --------------------------------------------------- @@ -63,7 +63,7 @@ # -- Options for HTML output ------------------------------------------------- -html_theme = "classic" +html_theme = "sphinx_rtd_theme" # -- Render GitHub links ------------------------------------------------- diff --git a/docs/inputs/toml.rst b/docs/inputs/toml.rst index 6ebaf6dad..66241f1e2 100644 --- a/docs/inputs/toml.rst +++ b/docs/inputs/toml.rst @@ -236,30 +236,6 @@ levels. For instance, there no ``peak`` periods during weekends. All that matter that the relative weights (i.e. the number of hours) are consistent and sum up to a year. -The input above defines the finest times slice in the code. In order to define rougher -timeslices we can introduce items in each levels that represent aggregates at that -level. By default, we have the following: - -.. 
code-block:: TOML - - [timeslices.aggregates] - all-day = ["night", "morning", "afternoon", "early-peak", "late-peak", "evening"] - all-week = ["weekday", "weekend"] - all-year = ["winter", "summer", "spring-autumn"] - -Here, ``all-day`` aggregates the full day. However, one could potentially create -aggregates such as: - -.. code-block:: TOML - - [timeslices.aggregates] - daylight = ["morning", "afternoon", "early-peak", "late-peak"] - nightlife = ["evening", "night"] - - It is possible to specify a timeslice level for the mca by adding an -`mca.timeslice_levels` section, using an inline table format. -See section on `Timeslices_`. - *outputs_cache* This option behaves exactly like `outputs` for sectors and accepts the same options but controls the output of cached quantities instead. This option is NOT available for @@ -327,41 +303,47 @@ A sector accepts these attributes: .. _scipy method's kind attribute: https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.interp1d.html -*investment_production* - In its simplest form, this is the name of a method to compute the production from a - sector, as used when splitting the demand across agents. In other words, this is the - computation of the production which affects future investments. In it's more general - form, *production* can be a subsection of its own, with a "name" attribute. For - instance: - - .. code-block:: TOML - - [sectors.residential.production] - name = "match" - costing = "prices" +*dispatch_production* + The method used to calculate supply of commodities after investments have been made. MUSE provides two methods in :py:mod:`muse.production`: - - share: the production is the maximum production for the existing capacity and + - share: assets each supply a proportion of demand based on their share of total + capacity + - maximum: the production is the maximum production for the existing capacity and the technology's utilization factor. See :py:func:`muse.production.maximum_production`. 
- - match: production and demand are matched according to a given cost metric. The - cost metric defaults to "prices". It can be modified by using the general form - given above, with a "costing" attribute. The latter can be "prices", - "gross_margin", or "lcoe". - See :py:func:`muse.production.demand_matched_production`. - - *production* can also refer to any custom production method registered with MUSE via - :py:func:`muse.production.register_production`. Defaults to "share". -*dispatch_production* - The name of the production method used to compute the sector's output, as returned - to the muse market clearing algorithm. In other words, this is computation of the - production method which will affect other sectors. + Additional methods can be registered with + :py:func:`muse.production.register_production` - It has the same format and options as the *production* attribute above. +*technodata* + Path to a csv file containing the characterization of the technologies involved in + the sector, e.g. lifetime, capital costs, etc... See :ref:`inputs-technodata`. + +*technodata_timeslices* + Optional. Path to a csv file describing the utilization factor and minimum service + factor of each technology in each timeslice. + See :ref:`user_guide/inputs/technodata_timeslices`. + +*commodities_in* + Path to a csv file describing the inputs of each technology involved in the sector. + See :ref:`inputs-iocomms`. + +*commodities_out* + Path to a csv file describing the outputs of each technology involved in the sector. + See :ref:`inputs-iocomms`. + +*timeslice_level* + Optional. This represents the level of timeslice granularity over which commodity + flows out of the sector are balanced (e.g. if "day", the sector will aim to meet + commodity demands on a daily basis, rather than an hourly basis). + If not given, defaults to the finest level defined in the global `timeslices` section. 
+ Note: If *technodata_timeslices* is used, the data in this file must match the timeslice + level of the sector (e.g. with global timeslice levels "month", "day" and "hour", if a sector has "day" as + the timeslice level, then *technodata_timeslices* must have columns "month" and "day", but not "hour") Sectors contain a number of subsections: *interactions* @@ -416,56 +398,6 @@ Sectors contain a number of subsections: "new_to_retro" type of network has been defined but no retro agents are included in the sector. -*technodata* - - Defines technologies and their features, in terms of costs, efficiencies, and emissions. - - *technodata* are specified as an inline TOML table, e.g. with single - brackets. A technodata section would look like: - - .. code-block:: TOML - - [sectors.residential.technodata] - technodata = '{path}/technodata/residential/Technodata.csv' - commodities_in = '{path}/technodata/residential/CommIn.csv' - commodities_out = '{path}/technodata/residential/CommOut.csv' - - Where: - *technodata* - Path to a csv file containing the characterization of the technologies involved in - the sector, e.g. lifetime, capital costs, etc... See :ref:`inputs-technodata`. - - *commodities_in* - Path to a csv file describing the inputs of each technology involved in the sector. - See :ref:`inputs-iocomms`. - - *commodities_out* - Path to a csv file describing the outputs of each technology involved in the sector. - See :ref:`inputs-iocomms`. - - Once the finest timeslice and its aggregates are given, it is possible for each sector -to define the timeslice simply by referring to the slices it will use at each level. - -.. _sector-timeslices: - -*timeslice_levels* - Optional. These define the timeslices of a sector. If not specified, the finest timeslice levels will be used - (See `Timeslices`_). - It can be implemented with the following rows: - -.. 
code-block:: TOML - - [sectors.some_sector.timeslice_levels] - day = ["daylight", "nightlife"] - month = ["all-year"] - - Above, ``sectors.some_sector.timeslice_levels.week`` defaults its value in the finest - timeslice. Indeed, if the subsection ``sectors.some_sector.timeslice_levels`` is not - given, then the sector will default to using the finest timeslices. - - If the MCA uses a rougher - timeslice framework, the market will be expressed within it. Hence information from - sectors with a finer timeslice framework will be lost. *subsectors* @@ -510,12 +442,12 @@ to define the timeslice simply by referring to the slices it will use at each le are registered via :py:func:`~muse.investments.register_investment`. At time of writing, three are available: + - "scipy" solver (default from v1.3): Formulates investment as a true LP problem and solves it using + the `scipy solver`_. + - an "adhoc" solver: Simple in-house solver that ranks the technologies according to cost and service the demand incrementally. - - "scipy" solver: Formulates investment as a true LP problem and solves it using - the `scipy solver`_. - - "cvxopt" solver: Formulates investment as a true LP problem and solves it using the python package `cvxopt`_. `cvxopt`_ is *not* installed by default. Users can install it with ``pip install cvxopt`` or ``conda install cvxopt``. @@ -731,58 +663,3 @@ The following attributes are accepted: filters.region = ["USA", "ASEA"] filters.commodity = ["algae", "fluorescent light"] - - --------------- -Legacy Sectors --------------- - -Legacy sectors wrap sectors developed for a previous version of MUSE to the open-source -version. - -Preset sectors are defined in :py:class:`~muse.sectors.PresetSector`. - -The can be defined in the TOML file as follows: - -.. 
code-block:: TOML - - [global_input_files] - macrodrivers = '{path}/input/Macrodrivers.csv' - regions = '{path}/input/Regions.csv' - global_commodities = '{path}/input/MUSEGlobalCommodities.csv' - - [sectors.Industry] - type = 'legacy' - priority = 'demand' - agregation_level = 'month' - excess = 0 - - userdata_path = '{muse_sectors}/Industry' - technodata_path = '{muse_sectors}/Industry' - timeslices_path = '{muse_sectors}/Industry/TimeslicesIndustry.csv' - output_path = '{path}/output' - -For historical reasons, the three `global_input_files` above are required. The sector -itself can use the following attributes. - -*type* - See the attribute in the standard mode, :ref:`type`. *Legacy* sectors - are those with type "legacy". - -*priority* - See the attribute in the standard mode, :ref:`priority`. - -*agregation_level* - Information relevant to the sector's timeslice. - -*excess* - Excess factor used to model early obsolescence. - -*userdata_path* - Path to a directory with sector-specific data files. - -*technodata_path* - Path to a technodata CSV file. See. :ref:`inputs-technodata`. - -*output_path* - Path to a directory where the sector will write output files. diff --git a/docs/installation/pipx-based.rst b/docs/installation/pipx-based.rst index 880c28405..4aed5c30d 100644 --- a/docs/installation/pipx-based.rst +++ b/docs/installation/pipx-based.rst @@ -6,7 +6,7 @@ pipx-based installation To help you installing MUSE in your system we will follow these steps: - `Launching a terminal`_: Needed to both install and run MUSE. -- `Installing a compatible Python version`_: MUSE works with Python 3.9 to 3.12. +- `Installing a compatible Python version`_: MUSE works with Python 3.9 to 3.13. - `Installing pipx`_: A Python application manager that facilitates installing, keeping applications updated and run them in their own isolated environments. 
- `Installing MUSE itself`_ @@ -67,7 +67,7 @@ Once you have launched the Terminal, the window that opens will show the command Installing a compatible Python version ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -MUSE needs Python to run and it works with versions 3.9 to 3.12, so the next step is to install a suitable version of Python. +MUSE needs Python to run and it works with versions 3.9 to 3.13, so the next step is to install a suitable version of Python. .. note:: diff --git a/docs/release-notes/index.rst b/docs/release-notes/index.rst index e7dc3fc34..317189174 100644 --- a/docs/release-notes/index.rst +++ b/docs/release-notes/index.rst @@ -6,6 +6,7 @@ This is the list of changes to MUSE between each release. .. toctree:: :maxdepth: 1 + v1.3.0 v1.2.3 v1.2.2 v1.2.1 diff --git a/docs/release-notes/v1.3.0.md b/docs/release-notes/v1.3.0.md new file mode 100644 index 000000000..65da60301 --- /dev/null +++ b/docs/release-notes/v1.3.0.md @@ -0,0 +1,48 @@ +# Release notes for MUSE v1.3.0 (December 3, 2024) + +## New `timeslice_level` parameter ({github}`550`) + +- Users can now specify a timeslice level for each sector using the new `timeslice_level` parameter (for example `timeslice_level = "month"`). +- This is the level of timeslice granularity over which commodity flows out of a sector are balanced with demand. For example, a user modelling an oil sector may wish to specify that oil supply must match demand on a monthly basis, but not necessarily in every hour (as oil can be stored). +- See documentation for this parameter [here](https://muse-os.readthedocs.io/en/latest/inputs/toml.html#standard-sectors). More details can also be found at {github}`550`. +- Note: This supersedes the timeslice aggregation feature in previous versions of MUSE. If you have a `timeslices.aggregates` section in your settings file, this should be deleted, otherwise a warning will be raised. 
+ + ## Default solver ({github}`587`) + + - The default solver has changed to "scipy" (previously "adhoc") + - If the `lpsolver` field is left blank for any subsector, a warning will be raised and "scipy" will be used. If you wish to use the "adhoc" solver, you __must__ specify `lpsolver = "adhoc"`. + - Users are encouraged to use the "scipy" solver for all subsectors, unless memory limits are reached (see {github}`389`) + + ## Added support for Python 3.13 ({github}`564`) + + - MUSE now works with Python versions 3.9 to 3.13 + - Python 3.9 reaches its end of life [next year](https://devguide.python.org/versions/), so if you are currently using Python 3.9, now is a good opportunity to upgrade + + ## Bug fixes + + - A major error with the "comfort", "efficiency", "capital_costs" and "ALCOE" objectives has been fixed ({github}`581`). + - Processing of the `AgentShare` parameter (agents file and technodata files) has changed to enforce consistency between files ({github}`586`). If you receive a new `AgentShareNotDefined` error, please double check that your `AgentShare` names are consistent between your agents and technodata files. See updated documentation [here](https://muse-os.readthedocs.io/en/latest/inputs/technodata.html). 
+ +## Deprecated features + +### Outputs ({github}`548`) + +- The following outputs have been deleted and can no longer be used: "timeslice_supply", "metric_supply", "yearly_supply", "metricy_supply", "timeslice_consumption", "metric_consumption", "yearly_consumption", "metricy_consumption" +- Any attempt to use these outputs will raise an error +- If you need detailed supply/consumption data for any sector, please use the "supply" and "consumption" outputs within each sector, for example (replacing `SECTOR_NAME` with the name of the sector): + + ```toml + [[sectors.SECTOR_NAME.outputs]] + filename = '{cwd}/{default_output_dir}/{Sector}/{Quantity}.csv' + quantity = "supply" + sink = "aggregate" + ``` + +### Production methods ({github}`557` and {github}`559`) + +- The "match" and "costed" options for the `dispatch_production` parameter have been removed +- Please see [here](https://muse-os.readthedocs.io/en/latest/inputs/toml.html#standard-sectors) for updated documentation on this parameter + +### Legacy sectors ({github}`510`) + +- Sectors with `type = "legacy"` are no longer compatible diff --git a/docs/source/muse.sectors.rst b/docs/source/muse.sectors.rst index ed2f2fab3..c6f0db570 100644 --- a/docs/source/muse.sectors.rst +++ b/docs/source/muse.sectors.rst @@ -12,14 +12,6 @@ muse.sectors.abstract module :undoc-members: :show-inheritance: -muse.sectors.legacy\_sector module ----------------------------------- - -.. 
automodule:: muse.sectors.legacy_sector - :members: - :undoc-members: - :show-inheritance: - muse.sectors.preset\_sector module ---------------------------------- diff --git a/docs/tutorial-code/add-agent/1-single-objective/settings.toml b/docs/tutorial-code/add-agent/1-single-objective/settings.toml index 981977384..8c508b8cd 100644 --- a/docs/tutorial-code/add-agent/1-single-objective/settings.toml +++ b/docs/tutorial-code/add-agent/1-single-objective/settings.toml @@ -41,7 +41,7 @@ commodities_out = '{path}/technodata/residential/CommOut.csv' [sectors.residential.subsectors.all] agents = '{path}/technodata/Agents.csv' existing_capacity = '{path}/technodata/residential/ExistingCapacity.csv' -lpsolver = "scipy" # Optional, defaults to "adhoc" +lpsolver = "scipy" # Optional, defaults to "scipy" constraints = [ # Optional, defaults to the constraints below "max_production", diff --git a/docs/tutorial-code/add-agent/2-multiple-objective/settings.toml b/docs/tutorial-code/add-agent/2-multiple-objective/settings.toml index 981977384..8c508b8cd 100644 --- a/docs/tutorial-code/add-agent/2-multiple-objective/settings.toml +++ b/docs/tutorial-code/add-agent/2-multiple-objective/settings.toml @@ -41,7 +41,7 @@ commodities_out = '{path}/technodata/residential/CommOut.csv' [sectors.residential.subsectors.all] agents = '{path}/technodata/Agents.csv' existing_capacity = '{path}/technodata/residential/ExistingCapacity.csv' -lpsolver = "scipy" # Optional, defaults to "adhoc" +lpsolver = "scipy" # Optional, defaults to "scipy" constraints = [ # Optional, defaults to the constraints below "max_production", diff --git a/docs/tutorial-code/add-correlation-demand/1-correlation/settings.toml b/docs/tutorial-code/add-correlation-demand/1-correlation/settings.toml index 82d722630..ab825a656 100644 --- a/docs/tutorial-code/add-correlation-demand/1-correlation/settings.toml +++ b/docs/tutorial-code/add-correlation-demand/1-correlation/settings.toml @@ -41,7 +41,7 @@ commodities_out = 
'{path}/technodata/residential/CommOut.csv' [sectors.residential.subsectors.all] agents = '{path}/technodata/Agents.csv' existing_capacity = '{path}/technodata/residential/ExistingCapacity.csv' -lpsolver = "scipy" # Optional, defaults to "adhoc" +lpsolver = "scipy" # Optional, defaults to "scipy" constraints = [ # Optional, defaults to the constraints below "max_production", diff --git a/docs/tutorial-code/add-new-technology/1-introduction/settings.toml b/docs/tutorial-code/add-new-technology/1-introduction/settings.toml index b938346e1..25c2d0c85 100644 --- a/docs/tutorial-code/add-new-technology/1-introduction/settings.toml +++ b/docs/tutorial-code/add-new-technology/1-introduction/settings.toml @@ -41,7 +41,7 @@ commodities_out = '{path}/technodata/residential/CommOut.csv' [sectors.residential.subsectors.all] agents = '{path}/technodata/Agents.csv' existing_capacity = '{path}/technodata/residential/ExistingCapacity.csv' -lpsolver = "scipy" # Optional, defaults to "adhoc" +lpsolver = "scipy" # Optional, defaults to "scipy" constraints = [ # Optional, defaults to the constraints below "max_production", diff --git a/docs/tutorial-code/add-new-technology/2-scenario/settings.toml b/docs/tutorial-code/add-new-technology/2-scenario/settings.toml index b938346e1..25c2d0c85 100644 --- a/docs/tutorial-code/add-new-technology/2-scenario/settings.toml +++ b/docs/tutorial-code/add-new-technology/2-scenario/settings.toml @@ -41,7 +41,7 @@ commodities_out = '{path}/technodata/residential/CommOut.csv' [sectors.residential.subsectors.all] agents = '{path}/technodata/Agents.csv' existing_capacity = '{path}/technodata/residential/ExistingCapacity.csv' -lpsolver = "scipy" # Optional, defaults to "adhoc" +lpsolver = "scipy" # Optional, defaults to "scipy" constraints = [ # Optional, defaults to the constraints below "max_production", diff --git a/docs/tutorial-code/add-region/1-new-region/settings.toml b/docs/tutorial-code/add-region/1-new-region/settings.toml index 
d0e5557d2..6dfa11930 100644 --- a/docs/tutorial-code/add-region/1-new-region/settings.toml +++ b/docs/tutorial-code/add-region/1-new-region/settings.toml @@ -41,7 +41,7 @@ commodities_out = '{path}/technodata/residential/CommOut.csv' [sectors.residential.subsectors.all] agents = '{path}/technodata/Agents.csv' existing_capacity = '{path}/technodata/residential/ExistingCapacity.csv' -lpsolver = "scipy" # Optional, defaults to "adhoc" +lpsolver = "scipy" # Optional, defaults to "scipy" constraints = [ # Optional, defaults to the constraints below "max_production", diff --git a/docs/tutorial-code/add-service-demand/1-exogenous-demand/settings.toml b/docs/tutorial-code/add-service-demand/1-exogenous-demand/settings.toml index 981977384..8c508b8cd 100644 --- a/docs/tutorial-code/add-service-demand/1-exogenous-demand/settings.toml +++ b/docs/tutorial-code/add-service-demand/1-exogenous-demand/settings.toml @@ -41,7 +41,7 @@ commodities_out = '{path}/technodata/residential/CommOut.csv' [sectors.residential.subsectors.all] agents = '{path}/technodata/Agents.csv' existing_capacity = '{path}/technodata/residential/ExistingCapacity.csv' -lpsolver = "scipy" # Optional, defaults to "adhoc" +lpsolver = "scipy" # Optional, defaults to "scipy" constraints = [ # Optional, defaults to the constraints below "max_production", diff --git a/docs/tutorial-code/carbon-budget/1-carbon-budget/settings.toml b/docs/tutorial-code/carbon-budget/1-carbon-budget/settings.toml index 24815755c..ba5fac9c7 100644 --- a/docs/tutorial-code/carbon-budget/1-carbon-budget/settings.toml +++ b/docs/tutorial-code/carbon-budget/1-carbon-budget/settings.toml @@ -54,7 +54,7 @@ commodities_out = '{path}/technodata/residential/CommOut.csv' [sectors.residential.subsectors.all] agents = '{path}/technodata/Agents.csv' existing_capacity = '{path}/technodata/residential/ExistingCapacity.csv' -lpsolver = "scipy" # Optional, defaults to "adhoc" +lpsolver = "scipy" # Optional, defaults to "scipy" constraints = [ # 
Optional, defaults to the constraints below "max_production", diff --git a/docs/tutorial-code/min-max-timeslice-constraints/1-min-constraint/settings.toml b/docs/tutorial-code/min-max-timeslice-constraints/1-min-constraint/settings.toml index c566622c6..05130bfd0 100644 --- a/docs/tutorial-code/min-max-timeslice-constraints/1-min-constraint/settings.toml +++ b/docs/tutorial-code/min-max-timeslice-constraints/1-min-constraint/settings.toml @@ -41,7 +41,7 @@ commodities_out = '{path}/technodata/residential/CommOut.csv' [sectors.residential.subsectors.all] agents = '{path}/technodata/Agents.csv' existing_capacity = '{path}/technodata/residential/ExistingCapacity.csv' -lpsolver = "scipy" # Optional, defaults to "adhoc" +lpsolver = "scipy" # Optional, defaults to "scipy" constraints = [ # Optional, defaults to the constraints below "max_production", diff --git a/docs/tutorial-code/min-max-timeslice-constraints/2-max-constraint/settings.toml b/docs/tutorial-code/min-max-timeslice-constraints/2-max-constraint/settings.toml index c566622c6..05130bfd0 100644 --- a/docs/tutorial-code/min-max-timeslice-constraints/2-max-constraint/settings.toml +++ b/docs/tutorial-code/min-max-timeslice-constraints/2-max-constraint/settings.toml @@ -41,7 +41,7 @@ commodities_out = '{path}/technodata/residential/CommOut.csv' [sectors.residential.subsectors.all] agents = '{path}/technodata/Agents.csv' existing_capacity = '{path}/technodata/residential/ExistingCapacity.csv' -lpsolver = "scipy" # Optional, defaults to "adhoc" +lpsolver = "scipy" # Optional, defaults to "scipy" constraints = [ # Optional, defaults to the constraints below "max_production", diff --git a/docs/tutorial-code/modify-timing-data/1-modify-timeslices/settings.toml b/docs/tutorial-code/modify-timing-data/1-modify-timeslices/settings.toml index ebee78bfb..679f3c266 100644 --- a/docs/tutorial-code/modify-timing-data/1-modify-timeslices/settings.toml +++ 
b/docs/tutorial-code/modify-timing-data/1-modify-timeslices/settings.toml @@ -41,7 +41,7 @@ commodities_out = '{path}/technodata/residential/CommOut.csv' [sectors.residential.subsectors.all] agents = '{path}/technodata/Agents.csv' existing_capacity = '{path}/technodata/residential/ExistingCapacity.csv' -lpsolver = "scipy" # Optional, defaults to "adhoc" +lpsolver = "scipy" # Optional, defaults to "scipy" constraints = [ # Optional, defaults to the constraints below "max_production", diff --git a/docs/tutorial-code/modify-timing-data/2-modify-time-framework/settings.toml b/docs/tutorial-code/modify-timing-data/2-modify-time-framework/settings.toml index fc03a7ced..4a264e54d 100644 --- a/docs/tutorial-code/modify-timing-data/2-modify-time-framework/settings.toml +++ b/docs/tutorial-code/modify-timing-data/2-modify-time-framework/settings.toml @@ -41,7 +41,7 @@ commodities_out = '{path}/technodata/residential/CommOut.csv' [sectors.residential.subsectors.all] agents = '{path}/technodata/Agents.csv' existing_capacity = '{path}/technodata/residential/ExistingCapacity.csv' -lpsolver = "scipy" # Optional, defaults to "adhoc" +lpsolver = "scipy" # Optional, defaults to "scipy" constraints = [ # Optional, defaults to the constraints below "max_production", diff --git a/docs/tutorial-code/new-decision-metric/Results/MCAMetric_Consumption.csv b/docs/tutorial-code/new-decision-metric/Results/MCAMetric_Consumption.csv deleted file mode 100644 index 4014c90f2..000000000 --- a/docs/tutorial-code/new-decision-metric/Results/MCAMetric_Consumption.csv +++ /dev/null @@ -1,301 +0,0 @@ -agent,asset,category,comm_usage,commodity,consumption,day,dst_region,hour,installed,month,region,sector,technology,timeslice,units_prices,year -A1,0,newcapa,CONSUMABLE|ENERGY,gas,1.16000000000,all-week,R1,night,2020,all-year,R1,residential,gasboiler,0,MUS$2010/PJ,2020 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,1.74000000000,all-week,R1,morning,2020,all-year,R1,residential,gasboiler,1,MUS$2010/PJ,2020 
-A1,0,newcapa,CONSUMABLE|ENERGY,gas,1.16000000000,all-week,R1,afternoon,2020,all-year,R1,residential,gasboiler,2,MUS$2010/PJ,2020 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,1.74000000000,all-week,R1,early-peak,2020,all-year,R1,residential,gasboiler,3,MUS$2010/PJ,2020 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,1.93333333333,all-week,R1,late-peak,2020,all-year,R1,residential,gasboiler,4,MUS$2010/PJ,2020 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,1.93333333333,all-week,R1,evening,2020,all-year,R1,residential,gasboiler,5,MUS$2010/PJ,2020 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.32222222222,all-week,R1,night,2020,all-year,R1,residential,gasboiler,0,MUS$2010/PJ,2025 -A1,1,newcapa,CONSUMABLE|ENERGY,electricity,0.42222222222,all-week,R1,night,2020,all-year,R1,residential,heatpump,0,MUS$2010/PJ,2025 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.48333333333,all-week,R1,morning,2020,all-year,R1,residential,gasboiler,1,MUS$2010/PJ,2025 -A1,1,newcapa,CONSUMABLE|ENERGY,electricity,0.63333333333,all-week,R1,morning,2020,all-year,R1,residential,heatpump,1,MUS$2010/PJ,2025 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.32222222222,all-week,R1,afternoon,2020,all-year,R1,residential,gasboiler,2,MUS$2010/PJ,2025 -A1,1,newcapa,CONSUMABLE|ENERGY,electricity,0.42222222222,all-week,R1,afternoon,2020,all-year,R1,residential,heatpump,2,MUS$2010/PJ,2025 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.48333333333,all-week,R1,early-peak,2020,all-year,R1,residential,gasboiler,3,MUS$2010/PJ,2025 -A1,1,newcapa,CONSUMABLE|ENERGY,electricity,0.63333333333,all-week,R1,early-peak,2020,all-year,R1,residential,heatpump,3,MUS$2010/PJ,2025 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.96666666667,all-week,R1,late-peak,2020,all-year,R1,residential,gasboiler,4,MUS$2010/PJ,2025 -A1,1,newcapa,CONSUMABLE|ENERGY,electricity,1.26666666667,all-week,R1,late-peak,2020,all-year,R1,residential,heatpump,4,MUS$2010/PJ,2025 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.64444444444,all-week,R1,evening,2020,all-year,R1,residential,gasboiler,5,MUS$2010/PJ,2025 
-A1,1,newcapa,CONSUMABLE|ENERGY,electricity,0.84444444444,all-week,R1,evening,2020,all-year,R1,residential,heatpump,5,MUS$2010/PJ,2025 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.18131428571,all-week,R1,night,2020,all-year,R1,power,gasCCGT,0,MUS$2010/PJ,2025 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.24126984127,all-week,R1,night,2020,all-year,R1,power,solarPV,0,MUS$2010/kt,2025 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.07238095238,all-week,R1,night,2020,all-year,R1,power,windturbine,0,MUS$2010/kt,2025 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.27197142857,all-week,R1,morning,2020,all-year,R1,power,gasCCGT,1,MUS$2010/PJ,2025 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.36190476190,all-week,R1,morning,2020,all-year,R1,power,solarPV,1,MUS$2010/kt,2025 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.10857142857,all-week,R1,morning,2020,all-year,R1,power,windturbine,1,MUS$2010/kt,2025 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.18131428571,all-week,R1,afternoon,2020,all-year,R1,power,gasCCGT,2,MUS$2010/PJ,2025 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.24126984127,all-week,R1,afternoon,2020,all-year,R1,power,solarPV,2,MUS$2010/kt,2025 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.07238095238,all-week,R1,afternoon,2020,all-year,R1,power,windturbine,2,MUS$2010/kt,2025 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.27197142857,all-week,R1,early-peak,2020,all-year,R1,power,gasCCGT,3,MUS$2010/PJ,2025 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.36190476190,all-week,R1,early-peak,2020,all-year,R1,power,solarPV,3,MUS$2010/kt,2025 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.10857142857,all-week,R1,early-peak,2020,all-year,R1,power,windturbine,3,MUS$2010/kt,2025 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.50100000000,all-week,R1,late-peak,2020,all-year,R1,power,gasCCGT,4,MUS$2010/PJ,2025 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.66666666667,all-week,R1,late-peak,2020,all-year,R1,power,solarPV,4,MUS$2010/kt,2025 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.20000000000,all-week,R1,late-peak,2020,all-year,R1,power,windturbine,4,MUS$2010/kt,2025 
-A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.36262857143,all-week,R1,evening,2020,all-year,R1,power,gasCCGT,5,MUS$2010/PJ,2025 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.48253968254,all-week,R1,evening,2020,all-year,R1,power,solarPV,5,MUS$2010/kt,2025 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.14476190476,all-week,R1,evening,2020,all-year,R1,power,windturbine,5,MUS$2010/kt,2025 -A1,1,newcapa,CONSUMABLE|ENERGY,electricity,0.43678160920,all-week,R1,night,2020,all-year,R1,residential,heatpump,0,MUS$2010/PJ,2030 -A1,2,newcapa,CONSUMABLE|ENERGY,electricity,0.22988505747,all-week,R1,night,2025,all-year,R1,residential,heatpump,0,MUS$2010/PJ,2030 -A1,1,newcapa,CONSUMABLE|ENERGY,electricity,0.65517241379,all-week,R1,morning,2020,all-year,R1,residential,heatpump,1,MUS$2010/PJ,2030 -A1,2,newcapa,CONSUMABLE|ENERGY,electricity,0.34482758621,all-week,R1,morning,2025,all-year,R1,residential,heatpump,1,MUS$2010/PJ,2030 -A1,1,newcapa,CONSUMABLE|ENERGY,electricity,0.43678160920,all-week,R1,afternoon,2020,all-year,R1,residential,heatpump,2,MUS$2010/PJ,2030 -A1,2,newcapa,CONSUMABLE|ENERGY,electricity,0.22988505747,all-week,R1,afternoon,2025,all-year,R1,residential,heatpump,2,MUS$2010/PJ,2030 -A1,1,newcapa,CONSUMABLE|ENERGY,electricity,0.65517241379,all-week,R1,early-peak,2020,all-year,R1,residential,heatpump,3,MUS$2010/PJ,2030 -A1,2,newcapa,CONSUMABLE|ENERGY,electricity,0.34482758621,all-week,R1,early-peak,2025,all-year,R1,residential,heatpump,3,MUS$2010/PJ,2030 -A1,1,newcapa,CONSUMABLE|ENERGY,electricity,1.26666666667,all-week,R1,late-peak,2020,all-year,R1,residential,heatpump,4,MUS$2010/PJ,2030 -A1,2,newcapa,CONSUMABLE|ENERGY,electricity,0.66666666667,all-week,R1,late-peak,2025,all-year,R1,residential,heatpump,4,MUS$2010/PJ,2030 -A1,1,newcapa,CONSUMABLE|ENERGY,electricity,0.87356321839,all-week,R1,evening,2020,all-year,R1,residential,heatpump,5,MUS$2010/PJ,2030 -A1,2,newcapa,CONSUMABLE|ENERGY,electricity,0.45977011494,all-week,R1,evening,2025,all-year,R1,residential,heatpump,5,MUS$2010/PJ,2030 
-A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.09634615385,all-week,R1,night,2020,all-year,R1,power,gasCCGT,0,MUS$2010/PJ,2030 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.25641025641,all-week,R1,night,2020,all-year,R1,power,solarPV,0,MUS$2010/kt,2030 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.07692307692,all-week,R1,night,2020,all-year,R1,power,windturbine,0,MUS$2010/kt,2030 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.28903846154,all-week,R1,night,2025,all-year,R1,power,gasCCGT,0,MUS$2010/PJ,2030 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.10256410256,all-week,R1,night,2025,all-year,R1,power,solarPV,0,MUS$2010/kt,2030 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.14451923077,all-week,R1,morning,2020,all-year,R1,power,gasCCGT,1,MUS$2010/PJ,2030 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.38461538462,all-week,R1,morning,2020,all-year,R1,power,solarPV,1,MUS$2010/kt,2030 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.11538461538,all-week,R1,morning,2020,all-year,R1,power,windturbine,1,MUS$2010/kt,2030 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.43355769231,all-week,R1,morning,2025,all-year,R1,power,gasCCGT,1,MUS$2010/PJ,2030 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.15384615385,all-week,R1,morning,2025,all-year,R1,power,solarPV,1,MUS$2010/kt,2030 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.09634615385,all-week,R1,afternoon,2020,all-year,R1,power,gasCCGT,2,MUS$2010/PJ,2030 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.25641025641,all-week,R1,afternoon,2020,all-year,R1,power,solarPV,2,MUS$2010/kt,2030 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.07692307692,all-week,R1,afternoon,2020,all-year,R1,power,windturbine,2,MUS$2010/kt,2030 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.28903846154,all-week,R1,afternoon,2025,all-year,R1,power,gasCCGT,2,MUS$2010/PJ,2030 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.10256410256,all-week,R1,afternoon,2025,all-year,R1,power,solarPV,2,MUS$2010/kt,2030 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.14451923077,all-week,R1,early-peak,2020,all-year,R1,power,gasCCGT,3,MUS$2010/PJ,2030 
-A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.38461538462,all-week,R1,early-peak,2020,all-year,R1,power,solarPV,3,MUS$2010/kt,2030 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.11538461538,all-week,R1,early-peak,2020,all-year,R1,power,windturbine,3,MUS$2010/kt,2030 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.43355769231,all-week,R1,early-peak,2025,all-year,R1,power,gasCCGT,3,MUS$2010/PJ,2030 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.15384615385,all-week,R1,early-peak,2025,all-year,R1,power,solarPV,3,MUS$2010/kt,2030 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.25050000000,all-week,R1,late-peak,2020,all-year,R1,power,gasCCGT,4,MUS$2010/PJ,2030 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.66666666667,all-week,R1,late-peak,2020,all-year,R1,power,solarPV,4,MUS$2010/kt,2030 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.20000000000,all-week,R1,late-peak,2020,all-year,R1,power,windturbine,4,MUS$2010/kt,2030 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.75150000000,all-week,R1,late-peak,2025,all-year,R1,power,gasCCGT,4,MUS$2010/PJ,2030 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.26666666667,all-week,R1,late-peak,2025,all-year,R1,power,solarPV,4,MUS$2010/kt,2030 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.19269230769,all-week,R1,evening,2020,all-year,R1,power,gasCCGT,5,MUS$2010/PJ,2030 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.51282051282,all-week,R1,evening,2020,all-year,R1,power,solarPV,5,MUS$2010/kt,2030 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.15384615385,all-week,R1,evening,2020,all-year,R1,power,windturbine,5,MUS$2010/kt,2030 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.57807692308,all-week,R1,evening,2025,all-year,R1,power,gasCCGT,5,MUS$2010/PJ,2030 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.20512820513,all-week,R1,evening,2025,all-year,R1,power,solarPV,5,MUS$2010/kt,2030 -A1,2,newcapa,CONSUMABLE|ENERGY,electricity,0.22222222222,all-week,R1,night,2025,all-year,R1,residential,heatpump,0,MUS$2010/PJ,2035 -A1,3,newcapa,CONSUMABLE|ENERGY,electricity,0.57777777778,all-week,R1,night,2030,all-year,R1,residential,heatpump,0,MUS$2010/PJ,2035 
-A1,2,newcapa,CONSUMABLE|ENERGY,electricity,0.33333333333,all-week,R1,morning,2025,all-year,R1,residential,heatpump,1,MUS$2010/PJ,2035 -A1,3,newcapa,CONSUMABLE|ENERGY,electricity,0.86666666667,all-week,R1,morning,2030,all-year,R1,residential,heatpump,1,MUS$2010/PJ,2035 -A1,2,newcapa,CONSUMABLE|ENERGY,electricity,0.22222222222,all-week,R1,afternoon,2025,all-year,R1,residential,heatpump,2,MUS$2010/PJ,2035 -A1,3,newcapa,CONSUMABLE|ENERGY,electricity,0.57777777778,all-week,R1,afternoon,2030,all-year,R1,residential,heatpump,2,MUS$2010/PJ,2035 -A1,2,newcapa,CONSUMABLE|ENERGY,electricity,0.33333333333,all-week,R1,early-peak,2025,all-year,R1,residential,heatpump,3,MUS$2010/PJ,2035 -A1,3,newcapa,CONSUMABLE|ENERGY,electricity,0.86666666667,all-week,R1,early-peak,2030,all-year,R1,residential,heatpump,3,MUS$2010/PJ,2035 -A1,2,newcapa,CONSUMABLE|ENERGY,electricity,0.66666666667,all-week,R1,late-peak,2025,all-year,R1,residential,heatpump,4,MUS$2010/PJ,2035 -A1,3,newcapa,CONSUMABLE|ENERGY,electricity,1.73333333333,all-week,R1,late-peak,2030,all-year,R1,residential,heatpump,4,MUS$2010/PJ,2035 -A1,2,newcapa,CONSUMABLE|ENERGY,electricity,0.44444444444,all-week,R1,evening,2025,all-year,R1,residential,heatpump,5,MUS$2010/PJ,2035 -A1,3,newcapa,CONSUMABLE|ENERGY,electricity,1.15555555556,all-week,R1,evening,2030,all-year,R1,residential,heatpump,5,MUS$2010/PJ,2035 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.08350000000,all-week,R1,night,2020,all-year,R1,power,gasCCGT,0,MUS$2010/PJ,2035 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.22222222222,all-week,R1,night,2020,all-year,R1,power,solarPV,0,MUS$2010/kt,2035 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.06666666667,all-week,R1,night,2020,all-year,R1,power,windturbine,0,MUS$2010/kt,2035 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.25050000000,all-week,R1,night,2025,all-year,R1,power,gasCCGT,0,MUS$2010/PJ,2035 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.08888888889,all-week,R1,night,2025,all-year,R1,power,solarPV,0,MUS$2010/kt,2035 
-A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.22222222222,all-week,R1,night,2030,all-year,R1,power,solarPV,0,MUS$2010/kt,2035 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.12525000000,all-week,R1,morning,2020,all-year,R1,power,gasCCGT,1,MUS$2010/PJ,2035 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.33333333333,all-week,R1,morning,2020,all-year,R1,power,solarPV,1,MUS$2010/kt,2035 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.10000000000,all-week,R1,morning,2020,all-year,R1,power,windturbine,1,MUS$2010/kt,2035 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.37575000000,all-week,R1,morning,2025,all-year,R1,power,gasCCGT,1,MUS$2010/PJ,2035 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.13333333333,all-week,R1,morning,2025,all-year,R1,power,solarPV,1,MUS$2010/kt,2035 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.33333333333,all-week,R1,morning,2030,all-year,R1,power,solarPV,1,MUS$2010/kt,2035 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.08350000000,all-week,R1,afternoon,2020,all-year,R1,power,gasCCGT,2,MUS$2010/PJ,2035 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.22222222222,all-week,R1,afternoon,2020,all-year,R1,power,solarPV,2,MUS$2010/kt,2035 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.06666666667,all-week,R1,afternoon,2020,all-year,R1,power,windturbine,2,MUS$2010/kt,2035 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.25050000000,all-week,R1,afternoon,2025,all-year,R1,power,gasCCGT,2,MUS$2010/PJ,2035 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.08888888889,all-week,R1,afternoon,2025,all-year,R1,power,solarPV,2,MUS$2010/kt,2035 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.22222222222,all-week,R1,afternoon,2030,all-year,R1,power,solarPV,2,MUS$2010/kt,2035 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.12525000000,all-week,R1,early-peak,2020,all-year,R1,power,gasCCGT,3,MUS$2010/PJ,2035 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.33333333333,all-week,R1,early-peak,2020,all-year,R1,power,solarPV,3,MUS$2010/kt,2035 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.10000000000,all-week,R1,early-peak,2020,all-year,R1,power,windturbine,3,MUS$2010/kt,2035 
-A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.37575000000,all-week,R1,early-peak,2025,all-year,R1,power,gasCCGT,3,MUS$2010/PJ,2035 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.13333333333,all-week,R1,early-peak,2025,all-year,R1,power,solarPV,3,MUS$2010/kt,2035 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.33333333333,all-week,R1,early-peak,2030,all-year,R1,power,solarPV,3,MUS$2010/kt,2035 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.25050000000,all-week,R1,late-peak,2020,all-year,R1,power,gasCCGT,4,MUS$2010/PJ,2035 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.66666666667,all-week,R1,late-peak,2020,all-year,R1,power,solarPV,4,MUS$2010/kt,2035 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.20000000000,all-week,R1,late-peak,2020,all-year,R1,power,windturbine,4,MUS$2010/kt,2035 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.75150000000,all-week,R1,late-peak,2025,all-year,R1,power,gasCCGT,4,MUS$2010/PJ,2035 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.26666666667,all-week,R1,late-peak,2025,all-year,R1,power,solarPV,4,MUS$2010/kt,2035 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.66666666667,all-week,R1,late-peak,2030,all-year,R1,power,solarPV,4,MUS$2010/kt,2035 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.16700000000,all-week,R1,evening,2020,all-year,R1,power,gasCCGT,5,MUS$2010/PJ,2035 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.44444444444,all-week,R1,evening,2020,all-year,R1,power,solarPV,5,MUS$2010/kt,2035 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.13333333333,all-week,R1,evening,2020,all-year,R1,power,windturbine,5,MUS$2010/kt,2035 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.50100000000,all-week,R1,evening,2025,all-year,R1,power,gasCCGT,5,MUS$2010/PJ,2035 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.17777777778,all-week,R1,evening,2025,all-year,R1,power,solarPV,5,MUS$2010/kt,2035 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.44444444444,all-week,R1,evening,2030,all-year,R1,power,solarPV,5,MUS$2010/kt,2035 -A1,3,newcapa,CONSUMABLE|ENERGY,electricity,0.57777777778,all-week,R1,night,2030,all-year,R1,residential,heatpump,0,MUS$2010/PJ,2040 
-A1,4,newcapa,CONSUMABLE|ENERGY,electricity,0.35555555556,all-week,R1,night,2035,all-year,R1,residential,heatpump,0,MUS$2010/PJ,2040 -A1,3,newcapa,CONSUMABLE|ENERGY,electricity,0.86666666667,all-week,R1,morning,2030,all-year,R1,residential,heatpump,1,MUS$2010/PJ,2040 -A1,4,newcapa,CONSUMABLE|ENERGY,electricity,0.53333333333,all-week,R1,morning,2035,all-year,R1,residential,heatpump,1,MUS$2010/PJ,2040 -A1,3,newcapa,CONSUMABLE|ENERGY,electricity,0.57777777778,all-week,R1,afternoon,2030,all-year,R1,residential,heatpump,2,MUS$2010/PJ,2040 -A1,4,newcapa,CONSUMABLE|ENERGY,electricity,0.35555555556,all-week,R1,afternoon,2035,all-year,R1,residential,heatpump,2,MUS$2010/PJ,2040 -A1,3,newcapa,CONSUMABLE|ENERGY,electricity,0.86666666667,all-week,R1,early-peak,2030,all-year,R1,residential,heatpump,3,MUS$2010/PJ,2040 -A1,4,newcapa,CONSUMABLE|ENERGY,electricity,0.53333333333,all-week,R1,early-peak,2035,all-year,R1,residential,heatpump,3,MUS$2010/PJ,2040 -A1,3,newcapa,CONSUMABLE|ENERGY,electricity,1.73333333333,all-week,R1,late-peak,2030,all-year,R1,residential,heatpump,4,MUS$2010/PJ,2040 -A1,4,newcapa,CONSUMABLE|ENERGY,electricity,1.06666666667,all-week,R1,late-peak,2035,all-year,R1,residential,heatpump,4,MUS$2010/PJ,2040 -A1,3,newcapa,CONSUMABLE|ENERGY,electricity,1.15555555556,all-week,R1,evening,2030,all-year,R1,residential,heatpump,5,MUS$2010/PJ,2040 -A1,4,newcapa,CONSUMABLE|ENERGY,electricity,0.71111111111,all-week,R1,evening,2035,all-year,R1,residential,heatpump,5,MUS$2010/PJ,2040 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.08350000000,all-week,R1,night,2020,all-year,R1,power,gasCCGT,0,MUS$2010/PJ,2040 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.22222222222,all-week,R1,night,2020,all-year,R1,power,solarPV,0,MUS$2010/kt,2040 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.06666666667,all-week,R1,night,2020,all-year,R1,power,windturbine,0,MUS$2010/kt,2040 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.25050000000,all-week,R1,night,2025,all-year,R1,power,gasCCGT,0,MUS$2010/PJ,2040 
-A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.08888888889,all-week,R1,night,2025,all-year,R1,power,solarPV,0,MUS$2010/kt,2040 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.22222222222,all-week,R1,night,2030,all-year,R1,power,solarPV,0,MUS$2010/kt,2040 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.13333333333,all-week,R1,night,2035,all-year,R1,power,solarPV,0,MUS$2010/kt,2040 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.12525000000,all-week,R1,morning,2020,all-year,R1,power,gasCCGT,1,MUS$2010/PJ,2040 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.33333333333,all-week,R1,morning,2020,all-year,R1,power,solarPV,1,MUS$2010/kt,2040 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.10000000000,all-week,R1,morning,2020,all-year,R1,power,windturbine,1,MUS$2010/kt,2040 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.37575000000,all-week,R1,morning,2025,all-year,R1,power,gasCCGT,1,MUS$2010/PJ,2040 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.13333333333,all-week,R1,morning,2025,all-year,R1,power,solarPV,1,MUS$2010/kt,2040 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.33333333333,all-week,R1,morning,2030,all-year,R1,power,solarPV,1,MUS$2010/kt,2040 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.20000000000,all-week,R1,morning,2035,all-year,R1,power,solarPV,1,MUS$2010/kt,2040 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.08350000000,all-week,R1,afternoon,2020,all-year,R1,power,gasCCGT,2,MUS$2010/PJ,2040 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.22222222222,all-week,R1,afternoon,2020,all-year,R1,power,solarPV,2,MUS$2010/kt,2040 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.06666666667,all-week,R1,afternoon,2020,all-year,R1,power,windturbine,2,MUS$2010/kt,2040 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.25050000000,all-week,R1,afternoon,2025,all-year,R1,power,gasCCGT,2,MUS$2010/PJ,2040 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.08888888889,all-week,R1,afternoon,2025,all-year,R1,power,solarPV,2,MUS$2010/kt,2040 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.22222222222,all-week,R1,afternoon,2030,all-year,R1,power,solarPV,2,MUS$2010/kt,2040 
-A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.13333333333,all-week,R1,afternoon,2035,all-year,R1,power,solarPV,2,MUS$2010/kt,2040 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.12525000000,all-week,R1,early-peak,2020,all-year,R1,power,gasCCGT,3,MUS$2010/PJ,2040 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.33333333333,all-week,R1,early-peak,2020,all-year,R1,power,solarPV,3,MUS$2010/kt,2040 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.10000000000,all-week,R1,early-peak,2020,all-year,R1,power,windturbine,3,MUS$2010/kt,2040 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.37575000000,all-week,R1,early-peak,2025,all-year,R1,power,gasCCGT,3,MUS$2010/PJ,2040 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.13333333333,all-week,R1,early-peak,2025,all-year,R1,power,solarPV,3,MUS$2010/kt,2040 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.33333333333,all-week,R1,early-peak,2030,all-year,R1,power,solarPV,3,MUS$2010/kt,2040 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.20000000000,all-week,R1,early-peak,2035,all-year,R1,power,solarPV,3,MUS$2010/kt,2040 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.25050000000,all-week,R1,late-peak,2020,all-year,R1,power,gasCCGT,4,MUS$2010/PJ,2040 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.66666666667,all-week,R1,late-peak,2020,all-year,R1,power,solarPV,4,MUS$2010/kt,2040 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.20000000000,all-week,R1,late-peak,2020,all-year,R1,power,windturbine,4,MUS$2010/kt,2040 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.75150000000,all-week,R1,late-peak,2025,all-year,R1,power,gasCCGT,4,MUS$2010/PJ,2040 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.26666666667,all-week,R1,late-peak,2025,all-year,R1,power,solarPV,4,MUS$2010/kt,2040 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.66666666667,all-week,R1,late-peak,2030,all-year,R1,power,solarPV,4,MUS$2010/kt,2040 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.40000000000,all-week,R1,late-peak,2035,all-year,R1,power,solarPV,4,MUS$2010/kt,2040 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.16700000000,all-week,R1,evening,2020,all-year,R1,power,gasCCGT,5,MUS$2010/PJ,2040 
-A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.44444444444,all-week,R1,evening,2020,all-year,R1,power,solarPV,5,MUS$2010/kt,2040 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.13333333333,all-week,R1,evening,2020,all-year,R1,power,windturbine,5,MUS$2010/kt,2040 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.50100000000,all-week,R1,evening,2025,all-year,R1,power,gasCCGT,5,MUS$2010/PJ,2040 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.17777777778,all-week,R1,evening,2025,all-year,R1,power,solarPV,5,MUS$2010/kt,2040 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.44444444444,all-week,R1,evening,2030,all-year,R1,power,solarPV,5,MUS$2010/kt,2040 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.26666666667,all-week,R1,evening,2035,all-year,R1,power,solarPV,5,MUS$2010/kt,2040 -A1,4,newcapa,CONSUMABLE|ENERGY,electricity,0.35555555556,all-week,R1,night,2035,all-year,R1,residential,heatpump,0,MUS$2010/PJ,2045 -A1,5,newcapa,CONSUMABLE|ENERGY,electricity,0.71111111111,all-week,R1,night,2040,all-year,R1,residential,heatpump,0,MUS$2010/PJ,2045 -A1,4,newcapa,CONSUMABLE|ENERGY,electricity,0.53333333333,all-week,R1,morning,2035,all-year,R1,residential,heatpump,1,MUS$2010/PJ,2045 -A1,5,newcapa,CONSUMABLE|ENERGY,electricity,1.06666666667,all-week,R1,morning,2040,all-year,R1,residential,heatpump,1,MUS$2010/PJ,2045 -A1,4,newcapa,CONSUMABLE|ENERGY,electricity,0.35555555556,all-week,R1,afternoon,2035,all-year,R1,residential,heatpump,2,MUS$2010/PJ,2045 -A1,5,newcapa,CONSUMABLE|ENERGY,electricity,0.71111111111,all-week,R1,afternoon,2040,all-year,R1,residential,heatpump,2,MUS$2010/PJ,2045 -A1,4,newcapa,CONSUMABLE|ENERGY,electricity,0.53333333333,all-week,R1,early-peak,2035,all-year,R1,residential,heatpump,3,MUS$2010/PJ,2045 -A1,5,newcapa,CONSUMABLE|ENERGY,electricity,1.06666666667,all-week,R1,early-peak,2040,all-year,R1,residential,heatpump,3,MUS$2010/PJ,2045 -A1,4,newcapa,CONSUMABLE|ENERGY,electricity,1.06666666667,all-week,R1,late-peak,2035,all-year,R1,residential,heatpump,4,MUS$2010/PJ,2045 
-A1,5,newcapa,CONSUMABLE|ENERGY,electricity,2.13333333333,all-week,R1,late-peak,2040,all-year,R1,residential,heatpump,4,MUS$2010/PJ,2045 -A1,4,newcapa,CONSUMABLE|ENERGY,electricity,0.71111111111,all-week,R1,evening,2035,all-year,R1,residential,heatpump,5,MUS$2010/PJ,2045 -A1,5,newcapa,CONSUMABLE|ENERGY,electricity,1.42222222222,all-week,R1,evening,2040,all-year,R1,residential,heatpump,5,MUS$2010/PJ,2045 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.08350000000,all-week,R1,night,2020,all-year,R1,power,gasCCGT,0,MUS$2010/PJ,2045 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.22222222222,all-week,R1,night,2020,all-year,R1,power,solarPV,0,MUS$2010/kt,2045 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.06666666667,all-week,R1,night,2020,all-year,R1,power,windturbine,0,MUS$2010/kt,2045 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.25050000000,all-week,R1,night,2025,all-year,R1,power,gasCCGT,0,MUS$2010/PJ,2045 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.08888888889,all-week,R1,night,2025,all-year,R1,power,solarPV,0,MUS$2010/kt,2045 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.22222222222,all-week,R1,night,2030,all-year,R1,power,solarPV,0,MUS$2010/kt,2045 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.13333333333,all-week,R1,night,2035,all-year,R1,power,solarPV,0,MUS$2010/kt,2045 -A1,7,newcapa,CONSUMABLE|ENERGY,solar,0.13333333333,all-week,R1,night,2040,all-year,R1,power,solarPV,0,MUS$2010/kt,2045 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.12525000000,all-week,R1,morning,2020,all-year,R1,power,gasCCGT,1,MUS$2010/PJ,2045 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.33333333333,all-week,R1,morning,2020,all-year,R1,power,solarPV,1,MUS$2010/kt,2045 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.10000000000,all-week,R1,morning,2020,all-year,R1,power,windturbine,1,MUS$2010/kt,2045 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.37575000000,all-week,R1,morning,2025,all-year,R1,power,gasCCGT,1,MUS$2010/PJ,2045 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.13333333333,all-week,R1,morning,2025,all-year,R1,power,solarPV,1,MUS$2010/kt,2045 
-A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.33333333333,all-week,R1,morning,2030,all-year,R1,power,solarPV,1,MUS$2010/kt,2045 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.20000000000,all-week,R1,morning,2035,all-year,R1,power,solarPV,1,MUS$2010/kt,2045 -A1,7,newcapa,CONSUMABLE|ENERGY,solar,0.20000000000,all-week,R1,morning,2040,all-year,R1,power,solarPV,1,MUS$2010/kt,2045 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.08350000000,all-week,R1,afternoon,2020,all-year,R1,power,gasCCGT,2,MUS$2010/PJ,2045 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.22222222222,all-week,R1,afternoon,2020,all-year,R1,power,solarPV,2,MUS$2010/kt,2045 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.06666666667,all-week,R1,afternoon,2020,all-year,R1,power,windturbine,2,MUS$2010/kt,2045 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.25050000000,all-week,R1,afternoon,2025,all-year,R1,power,gasCCGT,2,MUS$2010/PJ,2045 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.08888888889,all-week,R1,afternoon,2025,all-year,R1,power,solarPV,2,MUS$2010/kt,2045 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.22222222222,all-week,R1,afternoon,2030,all-year,R1,power,solarPV,2,MUS$2010/kt,2045 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.13333333333,all-week,R1,afternoon,2035,all-year,R1,power,solarPV,2,MUS$2010/kt,2045 -A1,7,newcapa,CONSUMABLE|ENERGY,solar,0.13333333333,all-week,R1,afternoon,2040,all-year,R1,power,solarPV,2,MUS$2010/kt,2045 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.12525000000,all-week,R1,early-peak,2020,all-year,R1,power,gasCCGT,3,MUS$2010/PJ,2045 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.33333333333,all-week,R1,early-peak,2020,all-year,R1,power,solarPV,3,MUS$2010/kt,2045 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.10000000000,all-week,R1,early-peak,2020,all-year,R1,power,windturbine,3,MUS$2010/kt,2045 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.37575000000,all-week,R1,early-peak,2025,all-year,R1,power,gasCCGT,3,MUS$2010/PJ,2045 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.13333333333,all-week,R1,early-peak,2025,all-year,R1,power,solarPV,3,MUS$2010/kt,2045 
-A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.33333333333,all-week,R1,early-peak,2030,all-year,R1,power,solarPV,3,MUS$2010/kt,2045 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.20000000000,all-week,R1,early-peak,2035,all-year,R1,power,solarPV,3,MUS$2010/kt,2045 -A1,7,newcapa,CONSUMABLE|ENERGY,solar,0.20000000000,all-week,R1,early-peak,2040,all-year,R1,power,solarPV,3,MUS$2010/kt,2045 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.25050000000,all-week,R1,late-peak,2020,all-year,R1,power,gasCCGT,4,MUS$2010/PJ,2045 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.66666666667,all-week,R1,late-peak,2020,all-year,R1,power,solarPV,4,MUS$2010/kt,2045 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.20000000000,all-week,R1,late-peak,2020,all-year,R1,power,windturbine,4,MUS$2010/kt,2045 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.75150000000,all-week,R1,late-peak,2025,all-year,R1,power,gasCCGT,4,MUS$2010/PJ,2045 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.26666666667,all-week,R1,late-peak,2025,all-year,R1,power,solarPV,4,MUS$2010/kt,2045 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.66666666667,all-week,R1,late-peak,2030,all-year,R1,power,solarPV,4,MUS$2010/kt,2045 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.40000000000,all-week,R1,late-peak,2035,all-year,R1,power,solarPV,4,MUS$2010/kt,2045 -A1,7,newcapa,CONSUMABLE|ENERGY,solar,0.40000000000,all-week,R1,late-peak,2040,all-year,R1,power,solarPV,4,MUS$2010/kt,2045 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.16700000000,all-week,R1,evening,2020,all-year,R1,power,gasCCGT,5,MUS$2010/PJ,2045 -A1,1,newcapa,CONSUMABLE|ENERGY,solar,0.44444444444,all-week,R1,evening,2020,all-year,R1,power,solarPV,5,MUS$2010/kt,2045 -A1,2,newcapa,CONSUMABLE|ENERGY,wind,0.13333333333,all-week,R1,evening,2020,all-year,R1,power,windturbine,5,MUS$2010/kt,2045 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.50100000000,all-week,R1,evening,2025,all-year,R1,power,gasCCGT,5,MUS$2010/PJ,2045 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.17777777778,all-week,R1,evening,2025,all-year,R1,power,solarPV,5,MUS$2010/kt,2045 
-A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.44444444444,all-week,R1,evening,2030,all-year,R1,power,solarPV,5,MUS$2010/kt,2045 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.26666666667,all-week,R1,evening,2035,all-year,R1,power,solarPV,5,MUS$2010/kt,2045 -A1,7,newcapa,CONSUMABLE|ENERGY,solar,0.26666666667,all-week,R1,evening,2040,all-year,R1,power,solarPV,5,MUS$2010/kt,2045 -A1,5,newcapa,CONSUMABLE|ENERGY,electricity,0.71111111111,all-week,R1,night,2040,all-year,R1,residential,heatpump,0,MUS$2010/PJ,2050 -A1,6,newcapa,CONSUMABLE|ENERGY,electricity,0.48888888889,all-week,R1,night,2045,all-year,R1,residential,heatpump,0,MUS$2010/PJ,2050 -A1,5,newcapa,CONSUMABLE|ENERGY,electricity,1.06666666667,all-week,R1,morning,2040,all-year,R1,residential,heatpump,1,MUS$2010/PJ,2050 -A1,6,newcapa,CONSUMABLE|ENERGY,electricity,0.73333333333,all-week,R1,morning,2045,all-year,R1,residential,heatpump,1,MUS$2010/PJ,2050 -A1,5,newcapa,CONSUMABLE|ENERGY,electricity,0.71111111111,all-week,R1,afternoon,2040,all-year,R1,residential,heatpump,2,MUS$2010/PJ,2050 -A1,6,newcapa,CONSUMABLE|ENERGY,electricity,0.48888888889,all-week,R1,afternoon,2045,all-year,R1,residential,heatpump,2,MUS$2010/PJ,2050 -A1,5,newcapa,CONSUMABLE|ENERGY,electricity,1.06666666667,all-week,R1,early-peak,2040,all-year,R1,residential,heatpump,3,MUS$2010/PJ,2050 -A1,6,newcapa,CONSUMABLE|ENERGY,electricity,0.73333333333,all-week,R1,early-peak,2045,all-year,R1,residential,heatpump,3,MUS$2010/PJ,2050 -A1,5,newcapa,CONSUMABLE|ENERGY,electricity,2.13333333333,all-week,R1,late-peak,2040,all-year,R1,residential,heatpump,4,MUS$2010/PJ,2050 -A1,6,newcapa,CONSUMABLE|ENERGY,electricity,1.46666666667,all-week,R1,late-peak,2045,all-year,R1,residential,heatpump,4,MUS$2010/PJ,2050 -A1,5,newcapa,CONSUMABLE|ENERGY,electricity,1.42222222222,all-week,R1,evening,2040,all-year,R1,residential,heatpump,5,MUS$2010/PJ,2050 -A1,6,newcapa,CONSUMABLE|ENERGY,electricity,0.97777777778,all-week,R1,evening,2045,all-year,R1,residential,heatpump,5,MUS$2010/PJ,2050 
-A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.08671153846,all-week,R1,night,2020,all-year,R1,power,gasCCGT,0,MUS$2010/PJ,2050 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.26013461538,all-week,R1,night,2025,all-year,R1,power,gasCCGT,0,MUS$2010/PJ,2050 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.09230769231,all-week,R1,night,2025,all-year,R1,power,solarPV,0,MUS$2010/kt,2050 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.23076923077,all-week,R1,night,2030,all-year,R1,power,solarPV,0,MUS$2010/kt,2050 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.13846153846,all-week,R1,night,2035,all-year,R1,power,solarPV,0,MUS$2010/kt,2050 -A1,7,newcapa,CONSUMABLE|ENERGY,solar,0.13846153846,all-week,R1,night,2040,all-year,R1,power,solarPV,0,MUS$2010/kt,2050 -A1,9,newcapa,CONSUMABLE|ENERGY,solar,0.23076923077,all-week,R1,night,2045,all-year,R1,power,solarPV,0,MUS$2010/kt,2050 -A1,10,newcapa,CONSUMABLE|ENERGY,wind,0.16153846154,all-week,R1,night,2045,all-year,R1,power,windturbine,0,MUS$2010/kt,2050 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.13006730769,all-week,R1,morning,2020,all-year,R1,power,gasCCGT,1,MUS$2010/PJ,2050 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.39020192308,all-week,R1,morning,2025,all-year,R1,power,gasCCGT,1,MUS$2010/PJ,2050 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.13846153846,all-week,R1,morning,2025,all-year,R1,power,solarPV,1,MUS$2010/kt,2050 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.34615384615,all-week,R1,morning,2030,all-year,R1,power,solarPV,1,MUS$2010/kt,2050 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.20769230769,all-week,R1,morning,2035,all-year,R1,power,solarPV,1,MUS$2010/kt,2050 -A1,7,newcapa,CONSUMABLE|ENERGY,solar,0.20769230769,all-week,R1,morning,2040,all-year,R1,power,solarPV,1,MUS$2010/kt,2050 -A1,9,newcapa,CONSUMABLE|ENERGY,solar,0.34615384615,all-week,R1,morning,2045,all-year,R1,power,solarPV,1,MUS$2010/kt,2050 -A1,10,newcapa,CONSUMABLE|ENERGY,wind,0.24230769231,all-week,R1,morning,2045,all-year,R1,power,windturbine,1,MUS$2010/kt,2050 
-A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.08671153846,all-week,R1,afternoon,2020,all-year,R1,power,gasCCGT,2,MUS$2010/PJ,2050 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.26013461538,all-week,R1,afternoon,2025,all-year,R1,power,gasCCGT,2,MUS$2010/PJ,2050 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.09230769231,all-week,R1,afternoon,2025,all-year,R1,power,solarPV,2,MUS$2010/kt,2050 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.23076923077,all-week,R1,afternoon,2030,all-year,R1,power,solarPV,2,MUS$2010/kt,2050 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.13846153846,all-week,R1,afternoon,2035,all-year,R1,power,solarPV,2,MUS$2010/kt,2050 -A1,7,newcapa,CONSUMABLE|ENERGY,solar,0.13846153846,all-week,R1,afternoon,2040,all-year,R1,power,solarPV,2,MUS$2010/kt,2050 -A1,9,newcapa,CONSUMABLE|ENERGY,solar,0.23076923077,all-week,R1,afternoon,2045,all-year,R1,power,solarPV,2,MUS$2010/kt,2050 -A1,10,newcapa,CONSUMABLE|ENERGY,wind,0.16153846154,all-week,R1,afternoon,2045,all-year,R1,power,windturbine,2,MUS$2010/kt,2050 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.13006730769,all-week,R1,early-peak,2020,all-year,R1,power,gasCCGT,3,MUS$2010/PJ,2050 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.39020192308,all-week,R1,early-peak,2025,all-year,R1,power,gasCCGT,3,MUS$2010/PJ,2050 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.13846153846,all-week,R1,early-peak,2025,all-year,R1,power,solarPV,3,MUS$2010/kt,2050 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.34615384615,all-week,R1,early-peak,2030,all-year,R1,power,solarPV,3,MUS$2010/kt,2050 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.20769230769,all-week,R1,early-peak,2035,all-year,R1,power,solarPV,3,MUS$2010/kt,2050 -A1,7,newcapa,CONSUMABLE|ENERGY,solar,0.20769230769,all-week,R1,early-peak,2040,all-year,R1,power,solarPV,3,MUS$2010/kt,2050 -A1,9,newcapa,CONSUMABLE|ENERGY,solar,0.34615384615,all-week,R1,early-peak,2045,all-year,R1,power,solarPV,3,MUS$2010/kt,2050 -A1,10,newcapa,CONSUMABLE|ENERGY,wind,0.24230769231,all-week,R1,early-peak,2045,all-year,R1,power,windturbine,3,MUS$2010/kt,2050 
-A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.25050000000,all-week,R1,late-peak,2020,all-year,R1,power,gasCCGT,4,MUS$2010/PJ,2050 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.75150000000,all-week,R1,late-peak,2025,all-year,R1,power,gasCCGT,4,MUS$2010/PJ,2050 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.26666666667,all-week,R1,late-peak,2025,all-year,R1,power,solarPV,4,MUS$2010/kt,2050 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.66666666667,all-week,R1,late-peak,2030,all-year,R1,power,solarPV,4,MUS$2010/kt,2050 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.40000000000,all-week,R1,late-peak,2035,all-year,R1,power,solarPV,4,MUS$2010/kt,2050 -A1,7,newcapa,CONSUMABLE|ENERGY,solar,0.40000000000,all-week,R1,late-peak,2040,all-year,R1,power,solarPV,4,MUS$2010/kt,2050 -A1,9,newcapa,CONSUMABLE|ENERGY,solar,0.66666666667,all-week,R1,late-peak,2045,all-year,R1,power,solarPV,4,MUS$2010/kt,2050 -A1,10,newcapa,CONSUMABLE|ENERGY,wind,0.46666666667,all-week,R1,late-peak,2045,all-year,R1,power,windturbine,4,MUS$2010/kt,2050 -A1,0,newcapa,CONSUMABLE|ENERGY,gas,0.17342307692,all-week,R1,evening,2020,all-year,R1,power,gasCCGT,5,MUS$2010/PJ,2050 -A1,3,newcapa,CONSUMABLE|ENERGY,gas,0.52026923077,all-week,R1,evening,2025,all-year,R1,power,gasCCGT,5,MUS$2010/PJ,2050 -A1,4,newcapa,CONSUMABLE|ENERGY,solar,0.18461538462,all-week,R1,evening,2025,all-year,R1,power,solarPV,5,MUS$2010/kt,2050 -A1,5,newcapa,CONSUMABLE|ENERGY,solar,0.46153846154,all-week,R1,evening,2030,all-year,R1,power,solarPV,5,MUS$2010/kt,2050 -A1,6,newcapa,CONSUMABLE|ENERGY,solar,0.27692307692,all-week,R1,evening,2035,all-year,R1,power,solarPV,5,MUS$2010/kt,2050 -A1,7,newcapa,CONSUMABLE|ENERGY,solar,0.27692307692,all-week,R1,evening,2040,all-year,R1,power,solarPV,5,MUS$2010/kt,2050 -A1,9,newcapa,CONSUMABLE|ENERGY,solar,0.46153846154,all-week,R1,evening,2045,all-year,R1,power,solarPV,5,MUS$2010/kt,2050 -A1,10,newcapa,CONSUMABLE|ENERGY,wind,0.32307692308,all-week,R1,evening,2045,all-year,R1,power,windturbine,5,MUS$2010/kt,2050 diff --git 
a/docs/tutorial-code/new-decision-metric/Results/MCAMetric_Supply.csv b/docs/tutorial-code/new-decision-metric/Results/MCAMetric_Supply.csv deleted file mode 100644 index 408c4bcbb..000000000 --- a/docs/tutorial-code/new-decision-metric/Results/MCAMetric_Supply.csv +++ /dev/null @@ -1,421 +0,0 @@ -agent,asset,category,comm_usage,commodity,day,dst_region,hour,installed,month,region,sector,supply,technology,timeslice,units_prices,year -A1,0,newcapa,PRODUCT|ENERGY,heat,all-week,R1,night,2020,all-year,R1,residential,1.00000000000,gasboiler,0,MUS$2010/PJ,2020 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,night,2020,all-year,R1,residential,64.71000000000,gasboiler,0,MUS$2010/kt,2020 -A1,0,newcapa,PRODUCT|ENERGY,heat,all-week,R1,morning,2020,all-year,R1,residential,1.50000000000,gasboiler,1,MUS$2010/PJ,2020 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,morning,2020,all-year,R1,residential,97.06500000000,gasboiler,1,MUS$2010/kt,2020 -A1,0,newcapa,PRODUCT|ENERGY,heat,all-week,R1,afternoon,2020,all-year,R1,residential,1.00000000000,gasboiler,2,MUS$2010/PJ,2020 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,afternoon,2020,all-year,R1,residential,64.71000000000,gasboiler,2,MUS$2010/kt,2020 -A1,0,newcapa,PRODUCT|ENERGY,heat,all-week,R1,early-peak,2020,all-year,R1,residential,1.50000000000,gasboiler,3,MUS$2010/PJ,2020 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,early-peak,2020,all-year,R1,residential,97.06500000000,gasboiler,3,MUS$2010/kt,2020 -A1,0,newcapa,PRODUCT|ENERGY,heat,all-week,R1,late-peak,2020,all-year,R1,residential,1.66666666667,gasboiler,4,MUS$2010/PJ,2020 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,late-peak,2020,all-year,R1,residential,107.85000000000,gasboiler,4,MUS$2010/kt,2020 -A1,0,newcapa,PRODUCT|ENERGY,heat,all-week,R1,evening,2020,all-year,R1,residential,1.66666666667,gasboiler,5,MUS$2010/PJ,2020 
-A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,evening,2020,all-year,R1,residential,107.85000000000,gasboiler,5,MUS$2010/kt,2020 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,night,2020,all-year,R1,gas,1.16000000000,gassupply1,0,MUS$2010/PJ,2020 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,morning,2020,all-year,R1,gas,1.74000000000,gassupply1,1,MUS$2010/PJ,2020 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,afternoon,2020,all-year,R1,gas,1.16000000000,gassupply1,2,MUS$2010/PJ,2020 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,early-peak,2020,all-year,R1,gas,1.74000000000,gassupply1,3,MUS$2010/PJ,2020 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,late-peak,2020,all-year,R1,gas,1.93333333333,gassupply1,4,MUS$2010/PJ,2020 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,evening,2020,all-year,R1,gas,1.93333333333,gassupply1,5,MUS$2010/PJ,2020 -A1,0,newcapa,PRODUCT|ENERGY,heat,all-week,R1,night,2020,all-year,R1,residential,0.27777777778,gasboiler,0,MUS$2010/PJ,2025 -A1,1,newcapa,PRODUCT|ENERGY,heat,all-week,R1,night,2020,all-year,R1,residential,1.05555555556,heatpump,0,MUS$2010/PJ,2025 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,night,2020,all-year,R1,residential,17.97500000000,gasboiler,0,MUS$2010/kt,2025 -A1,0,newcapa,PRODUCT|ENERGY,heat,all-week,R1,morning,2020,all-year,R1,residential,0.41666666667,gasboiler,1,MUS$2010/PJ,2025 -A1,1,newcapa,PRODUCT|ENERGY,heat,all-week,R1,morning,2020,all-year,R1,residential,1.58333333333,heatpump,1,MUS$2010/PJ,2025 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,morning,2020,all-year,R1,residential,26.96250000000,gasboiler,1,MUS$2010/kt,2025 -A1,0,newcapa,PRODUCT|ENERGY,heat,all-week,R1,afternoon,2020,all-year,R1,residential,0.27777777778,gasboiler,2,MUS$2010/PJ,2025 -A1,1,newcapa,PRODUCT|ENERGY,heat,all-week,R1,afternoon,2020,all-year,R1,residential,1.05555555556,heatpump,2,MUS$2010/PJ,2025 
-A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,afternoon,2020,all-year,R1,residential,17.97500000000,gasboiler,2,MUS$2010/kt,2025 -A1,0,newcapa,PRODUCT|ENERGY,heat,all-week,R1,early-peak,2020,all-year,R1,residential,0.41666666667,gasboiler,3,MUS$2010/PJ,2025 -A1,1,newcapa,PRODUCT|ENERGY,heat,all-week,R1,early-peak,2020,all-year,R1,residential,1.58333333333,heatpump,3,MUS$2010/PJ,2025 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,early-peak,2020,all-year,R1,residential,26.96250000000,gasboiler,3,MUS$2010/kt,2025 -A1,0,newcapa,PRODUCT|ENERGY,heat,all-week,R1,late-peak,2020,all-year,R1,residential,0.83333333333,gasboiler,4,MUS$2010/PJ,2025 -A1,1,newcapa,PRODUCT|ENERGY,heat,all-week,R1,late-peak,2020,all-year,R1,residential,3.16666666667,heatpump,4,MUS$2010/PJ,2025 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,late-peak,2020,all-year,R1,residential,53.92500000000,gasboiler,4,MUS$2010/kt,2025 -A1,0,newcapa,PRODUCT|ENERGY,heat,all-week,R1,evening,2020,all-year,R1,residential,0.55555555556,gasboiler,5,MUS$2010/PJ,2025 -A1,1,newcapa,PRODUCT|ENERGY,heat,all-week,R1,evening,2020,all-year,R1,residential,2.11111111111,heatpump,5,MUS$2010/PJ,2025 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,evening,2020,all-year,R1,residential,35.95000000000,gasboiler,5,MUS$2010/kt,2025 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2020,all-year,R1,power,0.10857142857,gasCCGT,0,MUS$2010/PJ,2025 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2020,all-year,R1,power,0.24126984127,solarPV,0,MUS$2010/PJ,2025 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2020,all-year,R1,power,0.07238095238,windturbine,0,MUS$2010/PJ,2025 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,night,2020,all-year,R1,power,9.95274285714,gasCCGT,0,MUS$2010/kt,2025 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2020,all-year,R1,power,0.16285714286,gasCCGT,1,MUS$2010/PJ,2025 
-A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2020,all-year,R1,power,0.36190476190,solarPV,1,MUS$2010/PJ,2025 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2020,all-year,R1,power,0.10857142857,windturbine,1,MUS$2010/PJ,2025 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,morning,2020,all-year,R1,power,14.92911428571,gasCCGT,1,MUS$2010/kt,2025 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2020,all-year,R1,power,0.10857142857,gasCCGT,2,MUS$2010/PJ,2025 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2020,all-year,R1,power,0.24126984127,solarPV,2,MUS$2010/PJ,2025 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2020,all-year,R1,power,0.07238095238,windturbine,2,MUS$2010/PJ,2025 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,afternoon,2020,all-year,R1,power,9.95274285714,gasCCGT,2,MUS$2010/kt,2025 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2020,all-year,R1,power,0.16285714286,gasCCGT,3,MUS$2010/PJ,2025 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2020,all-year,R1,power,0.36190476190,solarPV,3,MUS$2010/PJ,2025 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2020,all-year,R1,power,0.10857142857,windturbine,3,MUS$2010/PJ,2025 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,early-peak,2020,all-year,R1,power,14.92911428571,gasCCGT,3,MUS$2010/kt,2025 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2020,all-year,R1,power,0.30000000000,gasCCGT,4,MUS$2010/PJ,2025 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2020,all-year,R1,power,0.66666666667,solarPV,4,MUS$2010/PJ,2025 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2020,all-year,R1,power,0.20000000000,windturbine,4,MUS$2010/PJ,2025 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,late-peak,2020,all-year,R1,power,27.50100000000,gasCCGT,4,MUS$2010/kt,2025 
-A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2020,all-year,R1,power,0.21714285714,gasCCGT,5,MUS$2010/PJ,2025 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2020,all-year,R1,power,0.48253968254,solarPV,5,MUS$2010/PJ,2025 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2020,all-year,R1,power,0.14476190476,windturbine,5,MUS$2010/PJ,2025 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,evening,2020,all-year,R1,power,19.90548571429,gasCCGT,5,MUS$2010/kt,2025 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,night,2020,all-year,R1,gas,0.50353650794,gassupply1,0,MUS$2010/PJ,2025 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,morning,2020,all-year,R1,gas,0.75530476190,gassupply1,1,MUS$2010/PJ,2025 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,afternoon,2020,all-year,R1,gas,0.50353650794,gassupply1,2,MUS$2010/PJ,2025 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,early-peak,2020,all-year,R1,gas,0.75530476190,gassupply1,3,MUS$2010/PJ,2025 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,late-peak,2020,all-year,R1,gas,1.46766666667,gassupply1,4,MUS$2010/PJ,2025 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,evening,2020,all-year,R1,gas,1.00707301587,gassupply1,5,MUS$2010/PJ,2025 -A1,1,newcapa,PRODUCT|ENERGY,heat,all-week,R1,night,2020,all-year,R1,residential,1.09195402299,heatpump,0,MUS$2010/PJ,2030 -A1,2,newcapa,PRODUCT|ENERGY,heat,all-week,R1,night,2025,all-year,R1,residential,0.57471264368,heatpump,0,MUS$2010/PJ,2030 -A1,1,newcapa,PRODUCT|ENERGY,heat,all-week,R1,morning,2020,all-year,R1,residential,1.63793103448,heatpump,1,MUS$2010/PJ,2030 -A1,2,newcapa,PRODUCT|ENERGY,heat,all-week,R1,morning,2025,all-year,R1,residential,0.86206896552,heatpump,1,MUS$2010/PJ,2030 -A1,1,newcapa,PRODUCT|ENERGY,heat,all-week,R1,afternoon,2020,all-year,R1,residential,1.09195402299,heatpump,2,MUS$2010/PJ,2030 -A1,2,newcapa,PRODUCT|ENERGY,heat,all-week,R1,afternoon,2025,all-year,R1,residential,0.57471264368,heatpump,2,MUS$2010/PJ,2030 
-A1,1,newcapa,PRODUCT|ENERGY,heat,all-week,R1,early-peak,2020,all-year,R1,residential,1.63793103448,heatpump,3,MUS$2010/PJ,2030 -A1,2,newcapa,PRODUCT|ENERGY,heat,all-week,R1,early-peak,2025,all-year,R1,residential,0.86206896552,heatpump,3,MUS$2010/PJ,2030 -A1,1,newcapa,PRODUCT|ENERGY,heat,all-week,R1,late-peak,2020,all-year,R1,residential,3.16666666667,heatpump,4,MUS$2010/PJ,2030 -A1,2,newcapa,PRODUCT|ENERGY,heat,all-week,R1,late-peak,2025,all-year,R1,residential,1.66666666667,heatpump,4,MUS$2010/PJ,2030 -A1,1,newcapa,PRODUCT|ENERGY,heat,all-week,R1,evening,2020,all-year,R1,residential,2.18390804598,heatpump,5,MUS$2010/PJ,2030 -A1,2,newcapa,PRODUCT|ENERGY,heat,all-week,R1,evening,2025,all-year,R1,residential,1.14942528736,heatpump,5,MUS$2010/PJ,2030 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2020,all-year,R1,power,0.05769230769,gasCCGT,0,MUS$2010/PJ,2030 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2020,all-year,R1,power,0.25641025641,solarPV,0,MUS$2010/PJ,2030 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2020,all-year,R1,power,0.07692307692,windturbine,0,MUS$2010/PJ,2030 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2025,all-year,R1,power,0.17307692308,gasCCGT,0,MUS$2010/PJ,2030 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2025,all-year,R1,power,0.10256410256,solarPV,0,MUS$2010/PJ,2030 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,night,2020,all-year,R1,power,5.28865384615,gasCCGT,0,MUS$2010/kt,2030 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,night,2025,all-year,R1,power,15.86596153846,gasCCGT,0,MUS$2010/kt,2030 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2020,all-year,R1,power,0.08653846154,gasCCGT,1,MUS$2010/PJ,2030 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2020,all-year,R1,power,0.38461538462,solarPV,1,MUS$2010/PJ,2030 
-A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2020,all-year,R1,power,0.11538461538,windturbine,1,MUS$2010/PJ,2030 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2025,all-year,R1,power,0.25961538462,gasCCGT,1,MUS$2010/PJ,2030 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2025,all-year,R1,power,0.15384615385,solarPV,1,MUS$2010/PJ,2030 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,morning,2020,all-year,R1,power,7.93298076923,gasCCGT,1,MUS$2010/kt,2030 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,morning,2025,all-year,R1,power,23.79894230769,gasCCGT,1,MUS$2010/kt,2030 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2020,all-year,R1,power,0.05769230769,gasCCGT,2,MUS$2010/PJ,2030 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2020,all-year,R1,power,0.25641025641,solarPV,2,MUS$2010/PJ,2030 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2020,all-year,R1,power,0.07692307692,windturbine,2,MUS$2010/PJ,2030 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2025,all-year,R1,power,0.17307692308,gasCCGT,2,MUS$2010/PJ,2030 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2025,all-year,R1,power,0.10256410256,solarPV,2,MUS$2010/PJ,2030 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,afternoon,2020,all-year,R1,power,5.28865384615,gasCCGT,2,MUS$2010/kt,2030 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,afternoon,2025,all-year,R1,power,15.86596153846,gasCCGT,2,MUS$2010/kt,2030 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2020,all-year,R1,power,0.08653846154,gasCCGT,3,MUS$2010/PJ,2030 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2020,all-year,R1,power,0.38461538462,solarPV,3,MUS$2010/PJ,2030 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2020,all-year,R1,power,0.11538461538,windturbine,3,MUS$2010/PJ,2030 
-A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2025,all-year,R1,power,0.25961538462,gasCCGT,3,MUS$2010/PJ,2030 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2025,all-year,R1,power,0.15384615385,solarPV,3,MUS$2010/PJ,2030 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,early-peak,2020,all-year,R1,power,7.93298076923,gasCCGT,3,MUS$2010/kt,2030 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,early-peak,2025,all-year,R1,power,23.79894230769,gasCCGT,3,MUS$2010/kt,2030 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2020,all-year,R1,power,0.15000000000,gasCCGT,4,MUS$2010/PJ,2030 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2020,all-year,R1,power,0.66666666667,solarPV,4,MUS$2010/PJ,2030 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2020,all-year,R1,power,0.20000000000,windturbine,4,MUS$2010/PJ,2030 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2025,all-year,R1,power,0.45000000000,gasCCGT,4,MUS$2010/PJ,2030 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2025,all-year,R1,power,0.26666666667,solarPV,4,MUS$2010/PJ,2030 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,late-peak,2020,all-year,R1,power,13.75050000000,gasCCGT,4,MUS$2010/kt,2030 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,late-peak,2025,all-year,R1,power,41.25150000000,gasCCGT,4,MUS$2010/kt,2030 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2020,all-year,R1,power,0.11538461538,gasCCGT,5,MUS$2010/PJ,2030 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2020,all-year,R1,power,0.51282051282,solarPV,5,MUS$2010/PJ,2030 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2020,all-year,R1,power,0.15384615385,windturbine,5,MUS$2010/PJ,2030 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2025,all-year,R1,power,0.34615384615,gasCCGT,5,MUS$2010/PJ,2030 
-A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2025,all-year,R1,power,0.20512820513,solarPV,5,MUS$2010/PJ,2030 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,evening,2020,all-year,R1,power,10.57730769231,gasCCGT,5,MUS$2010/kt,2030 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,evening,2025,all-year,R1,power,31.73192307692,gasCCGT,5,MUS$2010/kt,2030 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,night,2020,all-year,R1,gas,0.38538461538,gassupply1,0,MUS$2010/PJ,2030 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,morning,2020,all-year,R1,gas,0.57807692308,gassupply1,1,MUS$2010/PJ,2030 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,afternoon,2020,all-year,R1,gas,0.38538461538,gassupply1,2,MUS$2010/PJ,2030 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,early-peak,2020,all-year,R1,gas,0.57807692308,gassupply1,3,MUS$2010/PJ,2030 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,late-peak,2020,all-year,R1,gas,1.00200000000,gassupply1,4,MUS$2010/PJ,2030 -A1,0,newcapa,PRODUCT|ENERGY,gas,all-week,R1,evening,2020,all-year,R1,gas,0.77076923077,gassupply1,5,MUS$2010/PJ,2030 -A1,2,newcapa,PRODUCT|ENERGY,heat,all-week,R1,night,2025,all-year,R1,residential,0.55555555556,heatpump,0,MUS$2010/PJ,2035 -A1,3,newcapa,PRODUCT|ENERGY,heat,all-week,R1,night,2030,all-year,R1,residential,1.44444444444,heatpump,0,MUS$2010/PJ,2035 -A1,2,newcapa,PRODUCT|ENERGY,heat,all-week,R1,morning,2025,all-year,R1,residential,0.83333333333,heatpump,1,MUS$2010/PJ,2035 -A1,3,newcapa,PRODUCT|ENERGY,heat,all-week,R1,morning,2030,all-year,R1,residential,2.16666666667,heatpump,1,MUS$2010/PJ,2035 -A1,2,newcapa,PRODUCT|ENERGY,heat,all-week,R1,afternoon,2025,all-year,R1,residential,0.55555555556,heatpump,2,MUS$2010/PJ,2035 -A1,3,newcapa,PRODUCT|ENERGY,heat,all-week,R1,afternoon,2030,all-year,R1,residential,1.44444444444,heatpump,2,MUS$2010/PJ,2035 -A1,2,newcapa,PRODUCT|ENERGY,heat,all-week,R1,early-peak,2025,all-year,R1,residential,0.83333333333,heatpump,3,MUS$2010/PJ,2035 
-A1,3,newcapa,PRODUCT|ENERGY,heat,all-week,R1,early-peak,2030,all-year,R1,residential,2.16666666667,heatpump,3,MUS$2010/PJ,2035 -A1,2,newcapa,PRODUCT|ENERGY,heat,all-week,R1,late-peak,2025,all-year,R1,residential,1.66666666667,heatpump,4,MUS$2010/PJ,2035 -A1,3,newcapa,PRODUCT|ENERGY,heat,all-week,R1,late-peak,2030,all-year,R1,residential,4.33333333333,heatpump,4,MUS$2010/PJ,2035 -A1,2,newcapa,PRODUCT|ENERGY,heat,all-week,R1,evening,2025,all-year,R1,residential,1.11111111111,heatpump,5,MUS$2010/PJ,2035 -A1,3,newcapa,PRODUCT|ENERGY,heat,all-week,R1,evening,2030,all-year,R1,residential,2.88888888889,heatpump,5,MUS$2010/PJ,2035 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2020,all-year,R1,power,0.05000000000,gasCCGT,0,MUS$2010/PJ,2035 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2020,all-year,R1,power,0.22222222222,solarPV,0,MUS$2010/PJ,2035 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2020,all-year,R1,power,0.06666666667,windturbine,0,MUS$2010/PJ,2035 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2025,all-year,R1,power,0.15000000000,gasCCGT,0,MUS$2010/PJ,2035 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2025,all-year,R1,power,0.08888888889,solarPV,0,MUS$2010/PJ,2035 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2030,all-year,R1,power,0.22222222222,solarPV,0,MUS$2010/PJ,2035 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,night,2020,all-year,R1,power,4.58350000000,gasCCGT,0,MUS$2010/kt,2035 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,night,2025,all-year,R1,power,13.75050000000,gasCCGT,0,MUS$2010/kt,2035 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2020,all-year,R1,power,0.07500000000,gasCCGT,1,MUS$2010/PJ,2035 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2020,all-year,R1,power,0.33333333333,solarPV,1,MUS$2010/PJ,2035 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2020,all-year,R1,power,0.10000000000,windturbine,1,MUS$2010/PJ,2035 
-A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2025,all-year,R1,power,0.22500000000,gasCCGT,1,MUS$2010/PJ,2035 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2025,all-year,R1,power,0.13333333333,solarPV,1,MUS$2010/PJ,2035 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2030,all-year,R1,power,0.33333333333,solarPV,1,MUS$2010/PJ,2035 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,morning,2020,all-year,R1,power,6.87525000000,gasCCGT,1,MUS$2010/kt,2035 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,morning,2025,all-year,R1,power,20.62575000000,gasCCGT,1,MUS$2010/kt,2035 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2020,all-year,R1,power,0.05000000000,gasCCGT,2,MUS$2010/PJ,2035 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2020,all-year,R1,power,0.22222222222,solarPV,2,MUS$2010/PJ,2035 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2020,all-year,R1,power,0.06666666667,windturbine,2,MUS$2010/PJ,2035 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2025,all-year,R1,power,0.15000000000,gasCCGT,2,MUS$2010/PJ,2035 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2025,all-year,R1,power,0.08888888889,solarPV,2,MUS$2010/PJ,2035 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2030,all-year,R1,power,0.22222222222,solarPV,2,MUS$2010/PJ,2035 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,afternoon,2020,all-year,R1,power,4.58350000000,gasCCGT,2,MUS$2010/kt,2035 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,afternoon,2025,all-year,R1,power,13.75050000000,gasCCGT,2,MUS$2010/kt,2035 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2020,all-year,R1,power,0.07500000000,gasCCGT,3,MUS$2010/PJ,2035 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2020,all-year,R1,power,0.33333333333,solarPV,3,MUS$2010/PJ,2035 
-A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2020,all-year,R1,power,0.10000000000,windturbine,3,MUS$2010/PJ,2035 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2025,all-year,R1,power,0.22500000000,gasCCGT,3,MUS$2010/PJ,2035 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2025,all-year,R1,power,0.13333333333,solarPV,3,MUS$2010/PJ,2035 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2030,all-year,R1,power,0.33333333333,solarPV,3,MUS$2010/PJ,2035 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,early-peak,2020,all-year,R1,power,6.87525000000,gasCCGT,3,MUS$2010/kt,2035 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,early-peak,2025,all-year,R1,power,20.62575000000,gasCCGT,3,MUS$2010/kt,2035 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2020,all-year,R1,power,0.15000000000,gasCCGT,4,MUS$2010/PJ,2035 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2020,all-year,R1,power,0.66666666667,solarPV,4,MUS$2010/PJ,2035 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2020,all-year,R1,power,0.20000000000,windturbine,4,MUS$2010/PJ,2035 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2025,all-year,R1,power,0.45000000000,gasCCGT,4,MUS$2010/PJ,2035 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2025,all-year,R1,power,0.26666666667,solarPV,4,MUS$2010/PJ,2035 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2030,all-year,R1,power,0.66666666667,solarPV,4,MUS$2010/PJ,2035 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,late-peak,2020,all-year,R1,power,13.75050000000,gasCCGT,4,MUS$2010/kt,2035 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,late-peak,2025,all-year,R1,power,41.25150000000,gasCCGT,4,MUS$2010/kt,2035 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2020,all-year,R1,power,0.10000000000,gasCCGT,5,MUS$2010/PJ,2035 
-A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2020,all-year,R1,power,0.44444444444,solarPV,5,MUS$2010/PJ,2035 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2020,all-year,R1,power,0.13333333333,windturbine,5,MUS$2010/PJ,2035 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2025,all-year,R1,power,0.30000000000,gasCCGT,5,MUS$2010/PJ,2035 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2025,all-year,R1,power,0.17777777778,solarPV,5,MUS$2010/PJ,2035 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2030,all-year,R1,power,0.44444444444,solarPV,5,MUS$2010/PJ,2035 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,evening,2020,all-year,R1,power,9.16700000000,gasCCGT,5,MUS$2010/kt,2035 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,evening,2025,all-year,R1,power,27.50100000000,gasCCGT,5,MUS$2010/kt,2035 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,night,2030,all-year,R1,gas,0.33400000000,gassupply1,0,MUS$2010/PJ,2035 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,morning,2030,all-year,R1,gas,0.50100000000,gassupply1,1,MUS$2010/PJ,2035 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,afternoon,2030,all-year,R1,gas,0.33400000000,gassupply1,2,MUS$2010/PJ,2035 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,early-peak,2030,all-year,R1,gas,0.50100000000,gassupply1,3,MUS$2010/PJ,2035 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,late-peak,2030,all-year,R1,gas,1.00200000000,gassupply1,4,MUS$2010/PJ,2035 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,evening,2030,all-year,R1,gas,0.66800000000,gassupply1,5,MUS$2010/PJ,2035 -A1,3,newcapa,PRODUCT|ENERGY,heat,all-week,R1,night,2030,all-year,R1,residential,1.44444444444,heatpump,0,MUS$2010/PJ,2040 -A1,4,newcapa,PRODUCT|ENERGY,heat,all-week,R1,night,2035,all-year,R1,residential,0.88888888889,heatpump,0,MUS$2010/PJ,2040 -A1,3,newcapa,PRODUCT|ENERGY,heat,all-week,R1,morning,2030,all-year,R1,residential,2.16666666667,heatpump,1,MUS$2010/PJ,2040 
-A1,4,newcapa,PRODUCT|ENERGY,heat,all-week,R1,morning,2035,all-year,R1,residential,1.33333333333,heatpump,1,MUS$2010/PJ,2040 -A1,3,newcapa,PRODUCT|ENERGY,heat,all-week,R1,afternoon,2030,all-year,R1,residential,1.44444444444,heatpump,2,MUS$2010/PJ,2040 -A1,4,newcapa,PRODUCT|ENERGY,heat,all-week,R1,afternoon,2035,all-year,R1,residential,0.88888888889,heatpump,2,MUS$2010/PJ,2040 -A1,3,newcapa,PRODUCT|ENERGY,heat,all-week,R1,early-peak,2030,all-year,R1,residential,2.16666666667,heatpump,3,MUS$2010/PJ,2040 -A1,4,newcapa,PRODUCT|ENERGY,heat,all-week,R1,early-peak,2035,all-year,R1,residential,1.33333333333,heatpump,3,MUS$2010/PJ,2040 -A1,3,newcapa,PRODUCT|ENERGY,heat,all-week,R1,late-peak,2030,all-year,R1,residential,4.33333333333,heatpump,4,MUS$2010/PJ,2040 -A1,4,newcapa,PRODUCT|ENERGY,heat,all-week,R1,late-peak,2035,all-year,R1,residential,2.66666666667,heatpump,4,MUS$2010/PJ,2040 -A1,3,newcapa,PRODUCT|ENERGY,heat,all-week,R1,evening,2030,all-year,R1,residential,2.88888888889,heatpump,5,MUS$2010/PJ,2040 -A1,4,newcapa,PRODUCT|ENERGY,heat,all-week,R1,evening,2035,all-year,R1,residential,1.77777777778,heatpump,5,MUS$2010/PJ,2040 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2020,all-year,R1,power,0.05000000000,gasCCGT,0,MUS$2010/PJ,2040 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2020,all-year,R1,power,0.22222222222,solarPV,0,MUS$2010/PJ,2040 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2020,all-year,R1,power,0.06666666667,windturbine,0,MUS$2010/PJ,2040 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2025,all-year,R1,power,0.15000000000,gasCCGT,0,MUS$2010/PJ,2040 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2025,all-year,R1,power,0.08888888889,solarPV,0,MUS$2010/PJ,2040 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2030,all-year,R1,power,0.22222222222,solarPV,0,MUS$2010/PJ,2040 
-A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2035,all-year,R1,power,0.13333333333,solarPV,0,MUS$2010/PJ,2040 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,night,2020,all-year,R1,power,4.58350000000,gasCCGT,0,MUS$2010/kt,2040 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,night,2025,all-year,R1,power,13.75050000000,gasCCGT,0,MUS$2010/kt,2040 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2020,all-year,R1,power,0.07500000000,gasCCGT,1,MUS$2010/PJ,2040 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2020,all-year,R1,power,0.33333333333,solarPV,1,MUS$2010/PJ,2040 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2020,all-year,R1,power,0.10000000000,windturbine,1,MUS$2010/PJ,2040 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2025,all-year,R1,power,0.22500000000,gasCCGT,1,MUS$2010/PJ,2040 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2025,all-year,R1,power,0.13333333333,solarPV,1,MUS$2010/PJ,2040 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2030,all-year,R1,power,0.33333333333,solarPV,1,MUS$2010/PJ,2040 -A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2035,all-year,R1,power,0.20000000000,solarPV,1,MUS$2010/PJ,2040 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,morning,2020,all-year,R1,power,6.87525000000,gasCCGT,1,MUS$2010/kt,2040 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,morning,2025,all-year,R1,power,20.62575000000,gasCCGT,1,MUS$2010/kt,2040 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2020,all-year,R1,power,0.05000000000,gasCCGT,2,MUS$2010/PJ,2040 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2020,all-year,R1,power,0.22222222222,solarPV,2,MUS$2010/PJ,2040 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2020,all-year,R1,power,0.06666666667,windturbine,2,MUS$2010/PJ,2040 
-A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2025,all-year,R1,power,0.15000000000,gasCCGT,2,MUS$2010/PJ,2040 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2025,all-year,R1,power,0.08888888889,solarPV,2,MUS$2010/PJ,2040 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2030,all-year,R1,power,0.22222222222,solarPV,2,MUS$2010/PJ,2040 -A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2035,all-year,R1,power,0.13333333333,solarPV,2,MUS$2010/PJ,2040 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,afternoon,2020,all-year,R1,power,4.58350000000,gasCCGT,2,MUS$2010/kt,2040 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,afternoon,2025,all-year,R1,power,13.75050000000,gasCCGT,2,MUS$2010/kt,2040 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2020,all-year,R1,power,0.07500000000,gasCCGT,3,MUS$2010/PJ,2040 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2020,all-year,R1,power,0.33333333333,solarPV,3,MUS$2010/PJ,2040 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2020,all-year,R1,power,0.10000000000,windturbine,3,MUS$2010/PJ,2040 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2025,all-year,R1,power,0.22500000000,gasCCGT,3,MUS$2010/PJ,2040 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2025,all-year,R1,power,0.13333333333,solarPV,3,MUS$2010/PJ,2040 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2030,all-year,R1,power,0.33333333333,solarPV,3,MUS$2010/PJ,2040 -A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2035,all-year,R1,power,0.20000000000,solarPV,3,MUS$2010/PJ,2040 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,early-peak,2020,all-year,R1,power,6.87525000000,gasCCGT,3,MUS$2010/kt,2040 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,early-peak,2025,all-year,R1,power,20.62575000000,gasCCGT,3,MUS$2010/kt,2040 
-A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2020,all-year,R1,power,0.15000000000,gasCCGT,4,MUS$2010/PJ,2040 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2020,all-year,R1,power,0.66666666667,solarPV,4,MUS$2010/PJ,2040 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2020,all-year,R1,power,0.20000000000,windturbine,4,MUS$2010/PJ,2040 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2025,all-year,R1,power,0.45000000000,gasCCGT,4,MUS$2010/PJ,2040 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2025,all-year,R1,power,0.26666666667,solarPV,4,MUS$2010/PJ,2040 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2030,all-year,R1,power,0.66666666667,solarPV,4,MUS$2010/PJ,2040 -A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2035,all-year,R1,power,0.40000000000,solarPV,4,MUS$2010/PJ,2040 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,late-peak,2020,all-year,R1,power,13.75050000000,gasCCGT,4,MUS$2010/kt,2040 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,late-peak,2025,all-year,R1,power,41.25150000000,gasCCGT,4,MUS$2010/kt,2040 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2020,all-year,R1,power,0.10000000000,gasCCGT,5,MUS$2010/PJ,2040 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2020,all-year,R1,power,0.44444444444,solarPV,5,MUS$2010/PJ,2040 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2020,all-year,R1,power,0.13333333333,windturbine,5,MUS$2010/PJ,2040 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2025,all-year,R1,power,0.30000000000,gasCCGT,5,MUS$2010/PJ,2040 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2025,all-year,R1,power,0.17777777778,solarPV,5,MUS$2010/PJ,2040 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2030,all-year,R1,power,0.44444444444,solarPV,5,MUS$2010/PJ,2040 
-A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2035,all-year,R1,power,0.26666666667,solarPV,5,MUS$2010/PJ,2040 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,evening,2020,all-year,R1,power,9.16700000000,gasCCGT,5,MUS$2010/kt,2040 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,evening,2025,all-year,R1,power,27.50100000000,gasCCGT,5,MUS$2010/kt,2040 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,night,2030,all-year,R1,gas,0.33400000000,gassupply1,0,MUS$2010/PJ,2040 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,morning,2030,all-year,R1,gas,0.50100000000,gassupply1,1,MUS$2010/PJ,2040 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,afternoon,2030,all-year,R1,gas,0.33400000000,gassupply1,2,MUS$2010/PJ,2040 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,early-peak,2030,all-year,R1,gas,0.50100000000,gassupply1,3,MUS$2010/PJ,2040 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,late-peak,2030,all-year,R1,gas,1.00200000000,gassupply1,4,MUS$2010/PJ,2040 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,evening,2030,all-year,R1,gas,0.66800000000,gassupply1,5,MUS$2010/PJ,2040 -A1,4,newcapa,PRODUCT|ENERGY,heat,all-week,R1,night,2035,all-year,R1,residential,0.88888888889,heatpump,0,MUS$2010/PJ,2045 -A1,5,newcapa,PRODUCT|ENERGY,heat,all-week,R1,night,2040,all-year,R1,residential,1.77777777778,heatpump,0,MUS$2010/PJ,2045 -A1,4,newcapa,PRODUCT|ENERGY,heat,all-week,R1,morning,2035,all-year,R1,residential,1.33333333333,heatpump,1,MUS$2010/PJ,2045 -A1,5,newcapa,PRODUCT|ENERGY,heat,all-week,R1,morning,2040,all-year,R1,residential,2.66666666667,heatpump,1,MUS$2010/PJ,2045 -A1,4,newcapa,PRODUCT|ENERGY,heat,all-week,R1,afternoon,2035,all-year,R1,residential,0.88888888889,heatpump,2,MUS$2010/PJ,2045 -A1,5,newcapa,PRODUCT|ENERGY,heat,all-week,R1,afternoon,2040,all-year,R1,residential,1.77777777778,heatpump,2,MUS$2010/PJ,2045 -A1,4,newcapa,PRODUCT|ENERGY,heat,all-week,R1,early-peak,2035,all-year,R1,residential,1.33333333333,heatpump,3,MUS$2010/PJ,2045 
-A1,5,newcapa,PRODUCT|ENERGY,heat,all-week,R1,early-peak,2040,all-year,R1,residential,2.66666666667,heatpump,3,MUS$2010/PJ,2045 -A1,4,newcapa,PRODUCT|ENERGY,heat,all-week,R1,late-peak,2035,all-year,R1,residential,2.66666666667,heatpump,4,MUS$2010/PJ,2045 -A1,5,newcapa,PRODUCT|ENERGY,heat,all-week,R1,late-peak,2040,all-year,R1,residential,5.33333333333,heatpump,4,MUS$2010/PJ,2045 -A1,4,newcapa,PRODUCT|ENERGY,heat,all-week,R1,evening,2035,all-year,R1,residential,1.77777777778,heatpump,5,MUS$2010/PJ,2045 -A1,5,newcapa,PRODUCT|ENERGY,heat,all-week,R1,evening,2040,all-year,R1,residential,3.55555555556,heatpump,5,MUS$2010/PJ,2045 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2020,all-year,R1,power,0.05000000000,gasCCGT,0,MUS$2010/PJ,2045 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2020,all-year,R1,power,0.22222222222,solarPV,0,MUS$2010/PJ,2045 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2020,all-year,R1,power,0.06666666667,windturbine,0,MUS$2010/PJ,2045 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2025,all-year,R1,power,0.15000000000,gasCCGT,0,MUS$2010/PJ,2045 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2025,all-year,R1,power,0.08888888889,solarPV,0,MUS$2010/PJ,2045 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2030,all-year,R1,power,0.22222222222,solarPV,0,MUS$2010/PJ,2045 -A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2035,all-year,R1,power,0.13333333333,solarPV,0,MUS$2010/PJ,2045 -A1,7,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2040,all-year,R1,power,0.13333333333,solarPV,0,MUS$2010/PJ,2045 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,night,2020,all-year,R1,power,4.58350000000,gasCCGT,0,MUS$2010/kt,2045 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,night,2025,all-year,R1,power,13.75050000000,gasCCGT,0,MUS$2010/kt,2045 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2020,all-year,R1,power,0.07500000000,gasCCGT,1,MUS$2010/PJ,2045 
-A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2020,all-year,R1,power,0.33333333333,solarPV,1,MUS$2010/PJ,2045 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2020,all-year,R1,power,0.10000000000,windturbine,1,MUS$2010/PJ,2045 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2025,all-year,R1,power,0.22500000000,gasCCGT,1,MUS$2010/PJ,2045 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2025,all-year,R1,power,0.13333333333,solarPV,1,MUS$2010/PJ,2045 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2030,all-year,R1,power,0.33333333333,solarPV,1,MUS$2010/PJ,2045 -A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2035,all-year,R1,power,0.20000000000,solarPV,1,MUS$2010/PJ,2045 -A1,7,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2040,all-year,R1,power,0.20000000000,solarPV,1,MUS$2010/PJ,2045 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,morning,2020,all-year,R1,power,6.87525000000,gasCCGT,1,MUS$2010/kt,2045 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,morning,2025,all-year,R1,power,20.62575000000,gasCCGT,1,MUS$2010/kt,2045 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2020,all-year,R1,power,0.05000000000,gasCCGT,2,MUS$2010/PJ,2045 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2020,all-year,R1,power,0.22222222222,solarPV,2,MUS$2010/PJ,2045 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2020,all-year,R1,power,0.06666666667,windturbine,2,MUS$2010/PJ,2045 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2025,all-year,R1,power,0.15000000000,gasCCGT,2,MUS$2010/PJ,2045 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2025,all-year,R1,power,0.08888888889,solarPV,2,MUS$2010/PJ,2045 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2030,all-year,R1,power,0.22222222222,solarPV,2,MUS$2010/PJ,2045 
-A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2035,all-year,R1,power,0.13333333333,solarPV,2,MUS$2010/PJ,2045 -A1,7,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2040,all-year,R1,power,0.13333333333,solarPV,2,MUS$2010/PJ,2045 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,afternoon,2020,all-year,R1,power,4.58350000000,gasCCGT,2,MUS$2010/kt,2045 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,afternoon,2025,all-year,R1,power,13.75050000000,gasCCGT,2,MUS$2010/kt,2045 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2020,all-year,R1,power,0.07500000000,gasCCGT,3,MUS$2010/PJ,2045 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2020,all-year,R1,power,0.33333333333,solarPV,3,MUS$2010/PJ,2045 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2020,all-year,R1,power,0.10000000000,windturbine,3,MUS$2010/PJ,2045 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2025,all-year,R1,power,0.22500000000,gasCCGT,3,MUS$2010/PJ,2045 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2025,all-year,R1,power,0.13333333333,solarPV,3,MUS$2010/PJ,2045 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2030,all-year,R1,power,0.33333333333,solarPV,3,MUS$2010/PJ,2045 -A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2035,all-year,R1,power,0.20000000000,solarPV,3,MUS$2010/PJ,2045 -A1,7,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2040,all-year,R1,power,0.20000000000,solarPV,3,MUS$2010/PJ,2045 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,early-peak,2020,all-year,R1,power,6.87525000000,gasCCGT,3,MUS$2010/kt,2045 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,early-peak,2025,all-year,R1,power,20.62575000000,gasCCGT,3,MUS$2010/kt,2045 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2020,all-year,R1,power,0.15000000000,gasCCGT,4,MUS$2010/PJ,2045 
-A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2020,all-year,R1,power,0.66666666667,solarPV,4,MUS$2010/PJ,2045 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2020,all-year,R1,power,0.20000000000,windturbine,4,MUS$2010/PJ,2045 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2025,all-year,R1,power,0.45000000000,gasCCGT,4,MUS$2010/PJ,2045 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2025,all-year,R1,power,0.26666666667,solarPV,4,MUS$2010/PJ,2045 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2030,all-year,R1,power,0.66666666667,solarPV,4,MUS$2010/PJ,2045 -A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2035,all-year,R1,power,0.40000000000,solarPV,4,MUS$2010/PJ,2045 -A1,7,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2040,all-year,R1,power,0.40000000000,solarPV,4,MUS$2010/PJ,2045 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,late-peak,2020,all-year,R1,power,13.75050000000,gasCCGT,4,MUS$2010/kt,2045 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,late-peak,2025,all-year,R1,power,41.25150000000,gasCCGT,4,MUS$2010/kt,2045 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2020,all-year,R1,power,0.10000000000,gasCCGT,5,MUS$2010/PJ,2045 -A1,1,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2020,all-year,R1,power,0.44444444444,solarPV,5,MUS$2010/PJ,2045 -A1,2,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2020,all-year,R1,power,0.13333333333,windturbine,5,MUS$2010/PJ,2045 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2025,all-year,R1,power,0.30000000000,gasCCGT,5,MUS$2010/PJ,2045 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2025,all-year,R1,power,0.17777777778,solarPV,5,MUS$2010/PJ,2045 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2030,all-year,R1,power,0.44444444444,solarPV,5,MUS$2010/PJ,2045 
-A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2035,all-year,R1,power,0.26666666667,solarPV,5,MUS$2010/PJ,2045 -A1,7,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2040,all-year,R1,power,0.26666666667,solarPV,5,MUS$2010/PJ,2045 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,evening,2020,all-year,R1,power,9.16700000000,gasCCGT,5,MUS$2010/kt,2045 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,evening,2025,all-year,R1,power,27.50100000000,gasCCGT,5,MUS$2010/kt,2045 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,night,2030,all-year,R1,gas,0.33400000000,gassupply1,0,MUS$2010/PJ,2045 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,morning,2030,all-year,R1,gas,0.50100000000,gassupply1,1,MUS$2010/PJ,2045 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,afternoon,2030,all-year,R1,gas,0.33400000000,gassupply1,2,MUS$2010/PJ,2045 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,early-peak,2030,all-year,R1,gas,0.50100000000,gassupply1,3,MUS$2010/PJ,2045 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,late-peak,2030,all-year,R1,gas,1.00200000000,gassupply1,4,MUS$2010/PJ,2045 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,evening,2030,all-year,R1,gas,0.66800000000,gassupply1,5,MUS$2010/PJ,2045 -A1,5,newcapa,PRODUCT|ENERGY,heat,all-week,R1,night,2040,all-year,R1,residential,1.77777777778,heatpump,0,MUS$2010/PJ,2050 -A1,6,newcapa,PRODUCT|ENERGY,heat,all-week,R1,night,2045,all-year,R1,residential,1.22222222222,heatpump,0,MUS$2010/PJ,2050 -A1,5,newcapa,PRODUCT|ENERGY,heat,all-week,R1,morning,2040,all-year,R1,residential,2.66666666667,heatpump,1,MUS$2010/PJ,2050 -A1,6,newcapa,PRODUCT|ENERGY,heat,all-week,R1,morning,2045,all-year,R1,residential,1.83333333333,heatpump,1,MUS$2010/PJ,2050 -A1,5,newcapa,PRODUCT|ENERGY,heat,all-week,R1,afternoon,2040,all-year,R1,residential,1.77777777778,heatpump,2,MUS$2010/PJ,2050 -A1,6,newcapa,PRODUCT|ENERGY,heat,all-week,R1,afternoon,2045,all-year,R1,residential,1.22222222222,heatpump,2,MUS$2010/PJ,2050 
-A1,5,newcapa,PRODUCT|ENERGY,heat,all-week,R1,early-peak,2040,all-year,R1,residential,2.66666666667,heatpump,3,MUS$2010/PJ,2050 -A1,6,newcapa,PRODUCT|ENERGY,heat,all-week,R1,early-peak,2045,all-year,R1,residential,1.83333333333,heatpump,3,MUS$2010/PJ,2050 -A1,5,newcapa,PRODUCT|ENERGY,heat,all-week,R1,late-peak,2040,all-year,R1,residential,5.33333333333,heatpump,4,MUS$2010/PJ,2050 -A1,6,newcapa,PRODUCT|ENERGY,heat,all-week,R1,late-peak,2045,all-year,R1,residential,3.66666666667,heatpump,4,MUS$2010/PJ,2050 -A1,5,newcapa,PRODUCT|ENERGY,heat,all-week,R1,evening,2040,all-year,R1,residential,3.55555555556,heatpump,5,MUS$2010/PJ,2050 -A1,6,newcapa,PRODUCT|ENERGY,heat,all-week,R1,evening,2045,all-year,R1,residential,2.44444444444,heatpump,5,MUS$2010/PJ,2050 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2020,all-year,R1,power,0.05192307692,gasCCGT,0,MUS$2010/PJ,2050 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2025,all-year,R1,power,0.15576923077,gasCCGT,0,MUS$2010/PJ,2050 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2025,all-year,R1,power,0.09230769231,solarPV,0,MUS$2010/PJ,2050 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2030,all-year,R1,power,0.23076923077,solarPV,0,MUS$2010/PJ,2050 -A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2035,all-year,R1,power,0.13846153846,solarPV,0,MUS$2010/PJ,2050 -A1,7,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2040,all-year,R1,power,0.13846153846,solarPV,0,MUS$2010/PJ,2050 -A1,9,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2045,all-year,R1,power,0.23076923077,solarPV,0,MUS$2010/PJ,2050 -A1,10,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,night,2045,all-year,R1,power,0.16153846154,windturbine,0,MUS$2010/PJ,2050 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,night,2020,all-year,R1,power,4.75978846154,gasCCGT,0,MUS$2010/kt,2050 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,night,2025,all-year,R1,power,14.27936538462,gasCCGT,0,MUS$2010/kt,2050 
-A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2020,all-year,R1,power,0.07788461538,gasCCGT,1,MUS$2010/PJ,2050 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2025,all-year,R1,power,0.23365384615,gasCCGT,1,MUS$2010/PJ,2050 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2025,all-year,R1,power,0.13846153846,solarPV,1,MUS$2010/PJ,2050 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2030,all-year,R1,power,0.34615384615,solarPV,1,MUS$2010/PJ,2050 -A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2035,all-year,R1,power,0.20769230769,solarPV,1,MUS$2010/PJ,2050 -A1,7,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2040,all-year,R1,power,0.20769230769,solarPV,1,MUS$2010/PJ,2050 -A1,9,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2045,all-year,R1,power,0.34615384615,solarPV,1,MUS$2010/PJ,2050 -A1,10,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,morning,2045,all-year,R1,power,0.24230769231,windturbine,1,MUS$2010/PJ,2050 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,morning,2020,all-year,R1,power,7.13968269231,gasCCGT,1,MUS$2010/kt,2050 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,morning,2025,all-year,R1,power,21.41904807692,gasCCGT,1,MUS$2010/kt,2050 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2020,all-year,R1,power,0.05192307692,gasCCGT,2,MUS$2010/PJ,2050 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2025,all-year,R1,power,0.15576923077,gasCCGT,2,MUS$2010/PJ,2050 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2025,all-year,R1,power,0.09230769231,solarPV,2,MUS$2010/PJ,2050 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2030,all-year,R1,power,0.23076923077,solarPV,2,MUS$2010/PJ,2050 -A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2035,all-year,R1,power,0.13846153846,solarPV,2,MUS$2010/PJ,2050 
-A1,7,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2040,all-year,R1,power,0.13846153846,solarPV,2,MUS$2010/PJ,2050 -A1,9,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2045,all-year,R1,power,0.23076923077,solarPV,2,MUS$2010/PJ,2050 -A1,10,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,afternoon,2045,all-year,R1,power,0.16153846154,windturbine,2,MUS$2010/PJ,2050 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,afternoon,2020,all-year,R1,power,4.75978846154,gasCCGT,2,MUS$2010/kt,2050 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,afternoon,2025,all-year,R1,power,14.27936538462,gasCCGT,2,MUS$2010/kt,2050 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2020,all-year,R1,power,0.07788461538,gasCCGT,3,MUS$2010/PJ,2050 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2025,all-year,R1,power,0.23365384615,gasCCGT,3,MUS$2010/PJ,2050 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2025,all-year,R1,power,0.13846153846,solarPV,3,MUS$2010/PJ,2050 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2030,all-year,R1,power,0.34615384615,solarPV,3,MUS$2010/PJ,2050 -A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2035,all-year,R1,power,0.20769230769,solarPV,3,MUS$2010/PJ,2050 -A1,7,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2040,all-year,R1,power,0.20769230769,solarPV,3,MUS$2010/PJ,2050 -A1,9,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2045,all-year,R1,power,0.34615384615,solarPV,3,MUS$2010/PJ,2050 -A1,10,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,early-peak,2045,all-year,R1,power,0.24230769231,windturbine,3,MUS$2010/PJ,2050 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,early-peak,2020,all-year,R1,power,7.13968269231,gasCCGT,3,MUS$2010/kt,2050 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,early-peak,2025,all-year,R1,power,21.41904807692,gasCCGT,3,MUS$2010/kt,2050 
-A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2020,all-year,R1,power,0.15000000000,gasCCGT,4,MUS$2010/PJ,2050 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2025,all-year,R1,power,0.45000000000,gasCCGT,4,MUS$2010/PJ,2050 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2025,all-year,R1,power,0.26666666667,solarPV,4,MUS$2010/PJ,2050 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2030,all-year,R1,power,0.66666666667,solarPV,4,MUS$2010/PJ,2050 -A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2035,all-year,R1,power,0.40000000000,solarPV,4,MUS$2010/PJ,2050 -A1,7,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2040,all-year,R1,power,0.40000000000,solarPV,4,MUS$2010/PJ,2050 -A1,9,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2045,all-year,R1,power,0.66666666667,solarPV,4,MUS$2010/PJ,2050 -A1,10,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,late-peak,2045,all-year,R1,power,0.46666666667,windturbine,4,MUS$2010/PJ,2050 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,late-peak,2020,all-year,R1,power,13.75050000000,gasCCGT,4,MUS$2010/kt,2050 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,late-peak,2025,all-year,R1,power,41.25150000000,gasCCGT,4,MUS$2010/kt,2050 -A1,0,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2020,all-year,R1,power,0.10384615385,gasCCGT,5,MUS$2010/PJ,2050 -A1,3,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2025,all-year,R1,power,0.31153846154,gasCCGT,5,MUS$2010/PJ,2050 -A1,4,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2025,all-year,R1,power,0.18461538462,solarPV,5,MUS$2010/PJ,2050 -A1,5,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2030,all-year,R1,power,0.46153846154,solarPV,5,MUS$2010/PJ,2050 -A1,6,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2035,all-year,R1,power,0.27692307692,solarPV,5,MUS$2010/PJ,2050 
-A1,7,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2040,all-year,R1,power,0.27692307692,solarPV,5,MUS$2010/PJ,2050 -A1,9,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2045,all-year,R1,power,0.46153846154,solarPV,5,MUS$2010/PJ,2050 -A1,10,newcapa,PRODUCT|ENERGY,electricity,all-week,R1,evening,2045,all-year,R1,power,0.32307692308,windturbine,5,MUS$2010/PJ,2050 -A1,0,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,evening,2020,all-year,R1,power,9.51957692308,gasCCGT,5,MUS$2010/kt,2050 -A1,3,newcapa,PRODUCT|ENVIRONMENTAL,CO2f,all-week,R1,evening,2025,all-year,R1,power,28.55873076923,gasCCGT,5,MUS$2010/kt,2050 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,night,2030,all-year,R1,gas,0.34684615385,gassupply1,0,MUS$2010/PJ,2050 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,morning,2030,all-year,R1,gas,0.52026923077,gassupply1,1,MUS$2010/PJ,2050 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,afternoon,2030,all-year,R1,gas,0.34684615385,gassupply1,2,MUS$2010/PJ,2050 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,early-peak,2030,all-year,R1,gas,0.52026923077,gassupply1,3,MUS$2010/PJ,2050 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,late-peak,2030,all-year,R1,gas,1.00200000000,gassupply1,4,MUS$2010/PJ,2050 -A1,1,newcapa,PRODUCT|ENERGY,gas,all-week,R1,evening,2030,all-year,R1,gas,0.69369230769,gassupply1,5,MUS$2010/PJ,2050 diff --git a/docs/tutorial-code/new-decision-metric/settings.toml b/docs/tutorial-code/new-decision-metric/settings.toml index 73e22677f..12d7ad113 100644 --- a/docs/tutorial-code/new-decision-metric/settings.toml +++ b/docs/tutorial-code/new-decision-metric/settings.toml @@ -23,16 +23,6 @@ quantity = "capacity" sink = "aggregate" filename = "{cwd}/{default_output_dir}/MCA{Quantity}.csv" -[[outputs]] -quantity = "timeslice_consumption" -sink = "aggregate" -filename = "{cwd}/{default_output_dir}/MCA{Quantity}.csv" - -[[outputs]] -quantity = "timeslice_supply" -sink = "aggregate" -filename = "{cwd}/{default_output_dir}/MCA{Quantity}.csv" - 
[carbon_budget_control] budget = [] @@ -51,7 +41,7 @@ commodities_out = '{path}/technodata/residential/CommOut.csv' [sectors.residential.subsectors.all] agents = '{path}/technodata/Agents.csv' existing_capacity = '{path}/technodata/residential/ExistingCapacity.csv' -lpsolver = "scipy" # Optional, defaults to "adhoc" +lpsolver = "scipy" # Optional, defaults to "scipy" constraints = [ # Optional, defaults to the constraints below "max_production", diff --git a/pyproject.toml b/pyproject.toml index 3c1367527..4babc60a2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ name = "MUSE_OS" description = "Energy System Model" readme = "README.md" license = {file = "LICENSE"} -requires-python = ">= 3.9, <3.13" +requires-python = ">= 3.9, <3.14" keywords = ["energy", "modelling"] classifiers = [ "Development Status :: 4 - Beta", @@ -19,12 +19,13 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Intended Audience :: Science/Research", "Intended Audience :: Other Audience", "License :: OSI Approved :: GNU General Public License v3 (GPLv3)" ] dependencies = [ - "numpy==2.0", + "numpy>=2.0", "scipy>=1.13", "pandas>=2.2", "xarray>=2024.6", @@ -54,6 +55,7 @@ dev = [ ] doc = [ "sphinx", + "sphinx-rtd-theme", "ipykernel", "nbsphinx", "myst-parser", diff --git a/src/muse/__init__.py b/src/muse/__init__.py index 5703279a0..c53ec7402 100644 --- a/src/muse/__init__.py +++ b/src/muse/__init__.py @@ -2,7 +2,7 @@ import os -VERSION = "1.2.3" +VERSION = "1.3.0" def _create_logger(color: bool = True): @@ -89,7 +89,6 @@ def add_file_logger() -> None: "read_technodictionary", "read_technologies", "read_timeslice_shares", - "read_csv_timeslices", "read_settings", "read_macro_drivers", "read_csv_agent_parameters", @@ -102,6 +101,5 @@ def add_file_logger() -> None: "objectives", "outputs", "sectors", - "legacy_sectors", VERSION, ] diff --git 
a/src/muse/__main__.py b/src/muse/__main__.py index 6108b0b8a..b7386dc06 100644 --- a/src/muse/__main__.py +++ b/src/muse/__main__.py @@ -62,5 +62,51 @@ def run(): muse_main(args.settings, args.model, args.copy) +def patched_broadcast_compat_data(self, other): + """Patch for xarray.core.variable._broadcast_compat_data. + + This has been introduced to disallow automatic broadcasting along the 'timeslice' + dimension. + + If `self` and `other` differ in whether they have a 'timeslice' dimension (in which + case automatic broadcasting would normally be performed), an error is raised. + + In this case, developers must explicitly handle broadcasting by calling either + `broadcast_timeslice` or `distribute_timeslice` (see `muse.timeslices`). The + appropriate choice of operation will depend on the context and the quantity in + question. + """ + from xarray.core.variable import Variable, _broadcast_compat_variables + + if (isinstance(other, Variable)) and ("timeslice" in self.dims) != ( + "timeslice" in getattr(other, "dims", []) + ): + raise ValueError( + "Broadcasting along the 'timeslice' dimension is required, but automatic " + "broadcasting is disabled. Please handle it explicitly using " + "`broadcast_timeslice` or `distribute_timeslice` (see `muse.timeslices`)." 
+ ) + + # The rest of the function is copied directly from + # xarray.core.variable._broadcast_compat_data + if all(hasattr(other, attr) for attr in ["dims", "data", "shape", "encoding"]): + # `other` satisfies the necessary Variable API for broadcast_variables + new_self, new_other = _broadcast_compat_variables(self, other) + self_data = new_self.data + other_data = new_other.data + dims = new_self.dims + else: + # rely on numpy broadcasting rules + self_data = self.data + other_data = other + dims = self.dims + return self_data, other_data, dims + + if "__main__" == __name__: - run() + from unittest.mock import patch + + with patch( + "xarray.core.variable._broadcast_compat_data", patched_broadcast_compat_data + ): + run() diff --git a/src/muse/agents/__init__.py b/src/muse/agents/__init__.py index d59b95029..392b04e73 100644 --- a/src/muse/agents/__init__.py +++ b/src/muse/agents/__init__.py @@ -4,8 +4,7 @@ "InvestingAgent", "agents_factory", "create_agent", - "factory", ] from muse.agents.agent import AbstractAgent, Agent, InvestingAgent -from muse.agents.factories import agents_factory, create_agent, factory +from muse.agents.factories import agents_factory, create_agent diff --git a/src/muse/agents/agent.py b/src/muse/agents/agent.py index fb34dffb4..db0e8a539 100644 --- a/src/muse/agents/agent.py +++ b/src/muse/agents/agent.py @@ -23,6 +23,7 @@ def __init__( interpolation: str = "linear", category: Optional[str] = None, quantity: Optional[float] = 1, + timeslice_level: Optional[str] = None, ): """Creates a standard MUSE agent. @@ -39,6 +40,9 @@ def __init__( together. quantity: optional value to classify different agents' share of the population. + timeslice_level: the timeslice level over which investments/production + will be optimized (e.g "hour", "day"). If None, the agent will use the + finest timeslice level. 
""" from uuid import uuid4 @@ -57,6 +61,8 @@ def __init__( """Attribute to classify different sets of agents.""" self.quantity = quantity """Attribute to classify different agents' share of the population.""" + self.timeslice_level = timeslice_level + """Timeslice level for the agent.""" def filter_input( self, @@ -80,14 +86,9 @@ def next( technologies: xr.Dataset, market: xr.Dataset, demand: xr.DataArray, - time_period: int = 1, - ): - """Iterates agent one turn. - - The goal is to figure out from market variables which technologies to invest in - and by how much. - """ - pass + time_period: int, + ) -> None: + """Increments agent to the next time point (e.g. performing investments).""" def __repr__(self): return ( @@ -98,10 +99,7 @@ def __repr__(self): class Agent(AbstractAgent): - """Agent that is capable of computing a search-space and a cost metric. - - This agent will not perform any investment itself. - """ + """Standard agent that does not perform investments.""" def __init__( self, @@ -122,9 +120,10 @@ def __init__( asset_threshold: float = 1e-4, quantity: Optional[float] = 1, spend_limit: int = 0, + timeslice_level: Optional[str] = None, **kwargs, ): - """Creates a standard buildings agent. + """Creates a standard agent. Arguments: name: Name of the agent, used for cross-refencing external tables @@ -149,6 +148,9 @@ def __init__( asset_threshold: Threshold below which assets are not added. quantity: different agents' share of the population spend_limit: The cost above which agents will not invest + timeslice_level: the timeslice level over which the agent invesments will + be optimized (e.g "hour", "day"). If None, the agent will use the finest + timeslice level. **kwargs: Extra arguments """ from muse.decisions import factory as decision_factory @@ -163,13 +165,11 @@ def __init__( interpolation=interpolation, category=category, quantity=quantity, + timeslice_level=timeslice_level, ) self.year = year - """ Current year. 
- - The year is incremented by one every time next is called. - """ + """ Current year. Incremented by one every time next is called.""" self.forecast = forecast """Number of years to look into the future for forecating purposed.""" if search_rules is None: @@ -250,8 +250,46 @@ def next( technologies: xr.Dataset, market: xr.Dataset, demand: xr.DataArray, - time_period: int = 1, - ) -> Optional[xr.Dataset]: + time_period: int, + ) -> None: + self.year += time_period + + +class InvestingAgent(Agent): + """Agent that performs investment for itself.""" + + def __init__( + self, + *args, + constraints: Optional[Callable] = None, + investment: Optional[Callable] = None, + **kwargs, + ): + """Creates an investing agent. + + Arguments: + *args: See :py:class:`~muse.agents.agent.Agent` + constraints: Set of constraints limiting investment + investment: A function to perform investments + **kwargs: See :py:class:`~muse.agents.agent.Agent` + """ + from muse.constraints import factory as csfactory + from muse.investments import factory as ifactory + + super().__init__(*args, **kwargs) + + self.invest = investment or ifactory() + """Method to use when fulfilling demand from rated set of techs.""" + self.constraints = constraints or csfactory() + """Creates a set of constraints limiting investment.""" + + def next( + self, + technologies: xr.Dataset, + market: xr.Dataset, + demand: xr.DataArray, + time_period: int, + ) -> None: """Iterates agent one turn. 
The goal is to figure out from market variables which technologies to @@ -263,22 +301,77 @@ def next( """ from logging import getLogger - # dataset with intermediate computational results from search - # makes it easier to pass intermediate results to functions, as well as - # filter them when inside a function + current_year = self.year + + # Skip forward if demand is zero if demand.size == 0 or demand.sum() < 1e-12: self.year += time_period return None + # Calculate the search space search_space = ( self.search_rules(self, demand, technologies, market).fillna(0).astype(int) ) + # Skip forward if the search space is empty if any(u == 0 for u in search_space.shape): getLogger(__name__).critical("Search space is empty") self.year += time_period return None + # Calculate the decision metric + decision = self.compute_decision(technologies, market, demand, search_space) + search = xr.Dataset(dict(search_space=search_space, decision=decision)) + if "timeslice" in search.dims: + search["demand"] = drop_timeslice(demand) + else: + search["demand"] = demand + + # Filter assets with demand + not_assets = [u for u in search.demand.dims if u != "asset"] + condtechs = ( + search.demand.sum(not_assets) > getattr(self, "tolerance", 1e-8) + ).values + search = search.sel(asset=condtechs) + + # Calculate constraints + constraints = self.constraints( + search.demand, + self.assets, + search.search_space, + market, + technologies, + year=current_year, + timeslice_level=self.timeslice_level, + ) + + # Calculate investments + investments = self.invest( + search[["search_space", "decision"]], + technologies, + constraints, + year=current_year, + timeslice_level=self.timeslice_level, + ) + + # Add investments + self.add_investments( + technologies, + investments, + current_year=current_year, + time_period=time_period, + ) + + # Increment the year + self.year += time_period + + def compute_decision( + self, + technologies: xr.Dataset, + market: xr.Dataset, + demand: xr.DataArray, + 
search_space: xr.DataArray, + ) -> xr.DataArray: # Filter technologies according to the search space, forecast year and region techs = self.filter_input( technologies, @@ -297,23 +390,15 @@ def next( # Filter prices according to the region prices = self.filter_input(market.prices) - # Compute the objective - decision = self._compute_objective( - technologies=techs, demand=reduced_demand, prices=prices - ) - - self.year += time_period - return xr.Dataset(dict(search_space=search_space, decision=decision)) - - def _compute_objective( - self, - technologies: xr.Dataset, - demand: xr.DataArray, - prices: xr.DataArray, - ) -> xr.DataArray: + # Compute the objectives objectives = self.objectives( - technologies=technologies, demand=demand, prices=prices + technologies=techs, + demand=reduced_demand, + prices=prices, + timeslice_level=self.timeslice_level, ) + + # Compute the decision metric decision = self.decision(objectives) return decision @@ -323,12 +408,12 @@ def add_investments( investments: xr.DataArray, current_year: int, time_period: int, - ): + ) -> None: """Add new assets to the agent.""" + # Calculate retirement profile of new assets new_capacity = self.retirement_profile( technologies, investments, current_year, time_period ) - if new_capacity is None: return new_capacity = new_capacity.drop_vars( @@ -336,6 +421,7 @@ def add_investments( ) new_assets = xr.Dataset(dict(capacity=new_capacity)) + # Merge new assets with existing assets self.assets = self.merge_transform(self.assets, new_assets) def retirement_profile( @@ -347,10 +433,13 @@ def retirement_profile( ) -> Optional[xr.DataArray]: from muse.investments import cliff_retirement_profile + # Sum investments if "asset" in investments.dims: investments = investments.sum("asset") if "agent" in investments.dims: investments = investments.squeeze("agent", drop=True) + + # Filter out investments below the threshold investments = investments.sel( replacement=(investments > self.asset_threshold).any( [d for d in 
investments.dims if d != "replacement"] @@ -359,22 +448,22 @@ def retirement_profile( if investments.size == 0: return None - # figures out the retirement profile for the new investments + # Calculate the retirement profile for new investments + # Note: technical life must be at least the length of the time period lifetime = self.filter_input( technologies.technical_life, year=current_year, technology=investments.replacement, - ) + ).clip(min=time_period) profile = cliff_retirement_profile( - lifetime.clip(min=time_period), - current_year=current_year + time_period, - protected=max(self.forecast - time_period - 1, 0), + lifetime, + investment_year=current_year + time_period, ) if "dst_region" in investments.coords: investments = investments.reindex_like(profile, method="ffill") + # Apply the retirement profile to the investments new_assets = (investments * profile).rename(replacement="asset") - new_assets["installed"] = "asset", [current_year] * len(new_assets.asset) # The new assets have picked up quite a few coordinates along the way. @@ -383,89 +472,3 @@ def retirement_profile( new, old = new_assets.dims, self.assets.dims raise RuntimeError(f"Asset dimensions do not match: {new} vs {old}") return new_assets - - -class InvestingAgent(Agent): - """Agent that performs investment for itself.""" - - def __init__( - self, - *args, - constraints: Optional[Callable] = None, - investment: Optional[Callable] = None, - **kwargs, - ): - """Creates a standard buildings agent. 
- - Arguments: - *args: See :py:class:`~muse.agents.agent.Agent` - constraints: Set of constraints limiting investment - investment: A function to perform investments - **kwargs: See :py:class:`~muse.agents.agent.Agent` - """ - from muse.constraints import factory as csfactory - from muse.investments import factory as ifactory - - super().__init__(*args, **kwargs) - - if investment is None: - investment = ifactory() - self.invest = investment - """Method to use when fulfilling demand from rated set of techs.""" - if not callable(constraints): - constraints = csfactory() - self.constraints = constraints - """Creates a set of constraints limiting investment.""" - - def next( - self, - technologies: xr.Dataset, - market: xr.Dataset, - demand: xr.DataArray, - time_period: int = 1, - ): - """Iterates agent one turn. - - The goal is to figure out from market variables which technologies to - invest in and by how much. - - This function will modify `self.assets` and increment `self.year`. - Other attributes are left unchanged. Arguments to the function are - never modified. 
- """ - current_year = self.year - search = super().next(technologies, market, demand, time_period=time_period) - if search is None: - return None - - if "timeslice" in search.dims: - search["demand"] = drop_timeslice(demand) - else: - search["demand"] = demand - not_assets = [u for u in search.demand.dims if u != "asset"] - condtechs = ( - search.demand.sum(not_assets) > getattr(self, "tolerance", 1e-8) - ).values - search = search.sel(asset=condtechs) - constraints = self.constraints( - search.demand, - self.assets, - search.search_space, - market, - technologies, - year=current_year, - ) - - investments = self.invest( - search[["search_space", "decision"]], - technologies, - constraints, - year=current_year, - ) - - self.add_investments( - technologies, - investments, - current_year=self.year - time_period, - time_period=time_period, - ) diff --git a/src/muse/agents/factories.py b/src/muse/agents/factories.py index 08549cb8b..66d0b79c5 100644 --- a/src/muse/agents/factories.py +++ b/src/muse/agents/factories.py @@ -7,7 +7,6 @@ import xarray as xr from muse.agents.agent import Agent, InvestingAgent -from muse.defaults import DEFAULT_SECTORS_DIRECTORY from muse.errors import AgentShareNotDefined, TechnologyNotDefined @@ -20,7 +19,7 @@ def create_standard_agent( interpolation: str = "linear", **kwargs, ): - """Creates retrofit agent from muse primitives.""" + """Creates standard (noninvesting) agent from muse primitives.""" from muse.filters import factory as filter_factory if share is not None: @@ -173,96 +172,6 @@ def create_agent(agent_type: str, **kwargs) -> Agent: return method(**kwargs) # type: ignore -def factory( - existing_capacity_path: Optional[Union[Path, str]] = None, - agent_parameters_path: Optional[Union[Path, str]] = None, - technodata_path: Optional[Union[Path, str]] = None, - technodata_timeslices_path: Optional[Union[str, Path]] = None, - sector: Optional[str] = None, - sectors_directory: Union[str, Path] = DEFAULT_SECTORS_DIRECTORY, - baseyear: 
int = 2010, -) -> list[Agent]: - """Reads list of agents from standard MUSE input files.""" - from copy import deepcopy - from logging import getLogger - from textwrap import dedent - - from muse.readers import ( - read_csv_agent_parameters, - read_initial_assets, - read_technodata_timeslices, - read_technodictionary, - ) - from muse.readers.csv import find_sectors_file - - if sector is None: - assert existing_capacity_path is not None - assert agent_parameters_path is not None - assert technodata_path is not None - - if existing_capacity_path is None: - existing_capacity_path = find_sectors_file( - f"Existing{sector}.csv", sector, sectors_directory - ) - if agent_parameters_path is None: - agent_parameters_path = find_sectors_file( - f"BuildingAgent{sector}.csv", sector, sectors_directory - ) - if technodata_path is None: - technodata_path = find_sectors_file( - f"technodata{sector}.csv", sector, sectors_directory - ) - - params = read_csv_agent_parameters(agent_parameters_path) - techno = read_technodictionary(technodata_path) - capa = read_initial_assets(existing_capacity_path) - if technodata_timeslices_path and isinstance( - technodata_timeslices_path, (str, Path) - ): - technodata_timeslices = read_technodata_timeslices(technodata_timeslices_path) - else: - technodata_timeslices = None - result = [] - for param in params: - if param["agent_type"] == "retrofit": - param["technologies"] = techno.sel(region=param["region"]) - if technodata_timeslices is not None: - param.drop_vars("utilization_factor") - param = param.merge(technodata_timeslices.sel(region=param["region"])) - param["category"] = param["agent_type"] - param["capacity"] = deepcopy(capa.sel(region=param["region"])) - param["year"] = baseyear - result.append(create_agent(**param)) - - nregs = len({u.region for u in result}) - types = [u.name for u in result] - msg = dedent( - """\ - Read agents for sector {name} from: - - agent parameter file {para} - - technologies data file {tech} - - initial 
capacity file {ini} - - Found {n} agents across {nregs} regions{end} - """.format( - n=len(result), - name=sector, - para=agent_parameters_path, - tech=technodata_path, - ini=existing_capacity_path, - nregs=nregs, - end="." if len(result) == 0 else ", with:\n", - ) - ) - for t in set(types): - n = types.count(t) - msg += " - {n} {t} agent{plural}\n".format( - n=n, t=t, plural="" if n == 1 else "s" - ) - getLogger(__name__).info(msg) - return result - - def agents_factory( params_or_path: Union[str, Path, list], capacity: Union[xr.DataArray, str, Path], @@ -394,7 +303,7 @@ def _standardize_inputs( def _standardize_investing_inputs( search_rules: Optional[Union[str, Sequence[str]]] = None, - investment: Union[Callable, str, Mapping] = "adhoc", + investment: Union[Callable, str, Mapping] = "scipy", constraints: Optional[ Union[Callable, str, Mapping, Sequence[Union[str, Mapping]]] ] = None, diff --git a/src/muse/constraints.py b/src/muse/constraints.py index 2d83a25e4..559de8eb9 100644 --- a/src/muse/constraints.py +++ b/src/muse/constraints.py @@ -72,7 +72,7 @@ def constraints( search_space: xr.DataArray, market: xr.Dataset, technologies: xr.Dataset, - year: Optional[int] = None, + year: int | None = None, **kwargs, ) -> Constraint: pass @@ -115,7 +115,7 @@ def constraints( from mypy_extensions import KwArg from muse.registration import registrator -from muse.timeslices import drop_timeslice +from muse.timeslices import broadcast_timeslice, distribute_timeslice, drop_timeslice CAPACITY_DIMS = "asset", "replacement", "region" """Default dimensions for capacity decision variables.""" @@ -248,11 +248,20 @@ def constraints( market: xr.Dataset, technologies: xr.Dataset, year: int | None = None, + timeslice_level: str | None = None, ) -> list[Constraint]: if year is None: year = int(market.year.min()) constraints = [ - function(demand, assets, search_space, market, technologies, year=year) + function( + demand, + assets, + search_space, + market, + technologies, + 
year=year, + timeslice_level=timeslice_level, + ) for function in constraint_closures ] return [constraint for constraint in constraints if constraint is not None] @@ -270,6 +279,7 @@ def max_capacity_expansion( year: int | None = None, forecast: int | None = None, interpolation: str = "linear", + **kwargs, ) -> Constraint: r"""Max-capacity addition, max-capacity growth, and capacity limits constraints. @@ -400,6 +410,7 @@ def demand( year: int | None = None, forecast: int = 5, interpolation: str = "linear", + **kwargs, ) -> Constraint: """Constraints production to meet demand.""" from muse.commodities import is_enduse @@ -423,6 +434,7 @@ def search_space( technologies: xr.Dataset, year: int | None = None, forecast: int = 5, + **kwargs, ) -> Constraint | None: """Removes disabled technologies.""" if search_space.all(): @@ -442,6 +454,8 @@ def max_production( market: xr.Dataset, technologies: xr.Dataset, year: int | None = None, + timeslice_level: str | None = None, + **kwargs, ) -> Constraint: """Constructs constraint between capacity and maximum production. 
@@ -451,7 +465,6 @@ def max_production( from xarray import ones_like, zeros_like from muse.commodities import is_enduse - from muse.timeslices import QuantityType, convert_timeslice if year is None: year = int(market.year.min()) @@ -470,15 +483,9 @@ def max_production( .sel(**kwargs) .drop_vars("technology") ) - capacity = ( - convert_timeslice( - techs.fixed_outputs, - market.timeslice, - QuantityType.EXTENSIVE, - ) - * techs.utilization_factor - ) - + capacity = distribute_timeslice( + techs.fixed_outputs, level=timeslice_level + ) * broadcast_timeslice(techs.utilization_factor, level=timeslice_level) if "asset" not in capacity.dims and "asset" in search_space.dims: capacity = capacity.expand_dims(asset=search_space.asset) production = ones_like(capacity) @@ -495,8 +502,8 @@ def max_production( maxadd = maxadd.rename(technology="replacement") maxadd = maxadd.where(maxadd == 0, 0.0) maxadd = maxadd.where(maxadd > 0, -1.0) - capacity = capacity * maxadd - production = production * maxadd + capacity = capacity * broadcast_timeslice(maxadd, level=timeslice_level) + production = production * broadcast_timeslice(maxadd, level=timeslice_level) b = b.rename(region="src_region") return xr.Dataset( dict(capacity=-cast(np.ndarray, capacity), production=production, b=b), @@ -512,6 +519,8 @@ def demand_limiting_capacity( market: xr.Dataset, technologies: xr.Dataset, year: int | None = None, + timeslice_level: str | None = None, + **kwargs, ) -> Constraint: """Limits the maximum combined capacity to match the demand. 
@@ -527,7 +536,13 @@ def demand_limiting_capacity( """ # We start with the maximum production constraint and the demand constraint capacity_constraint = max_production( - demand_, assets, search_space, market, technologies, year=year + demand_, + assets, + search_space, + market, + technologies, + year=year, + timeslice_level=timeslice_level, ) demand_constraint = demand( demand_, assets, search_space, market, technologies, year=year @@ -547,21 +562,9 @@ def demand_limiting_capacity( # utilization factor. if "timeslice" in b.dims or "timeslice" in capacity.dims: ratio = b / capacity - ts = ratio.timeslice.isel( - timeslice=ratio.min("replacement").argmax("timeslice") - ) - # We select this timeslice for each array - don't trust the indices: - # search for the right timeslice in the array and select it. - b = ( - b.isel(timeslice=(b.timeslice == ts).argmax("timeslice")) - if "timeslice" in b.dims - else b - ) - capacity = ( - capacity.isel(timeslice=(capacity.timeslice == ts).argmax("timeslice")) - if "timeslice" in capacity.dims - else capacity - ) + ts_index = ratio.min("replacement").argmax("timeslice") + b = b.isel(timeslice=ts_index) + capacity = capacity.isel(timeslice=ts_index) # An adjustment is required to account for technologies that have multiple output # commodities @@ -732,12 +735,13 @@ def minimum_service( market: xr.Dataset, technologies: xr.Dataset, year: int | None = None, + timeslice_level: str | None = None, + **kwargs, ) -> Constraint | None: """Constructs constraint between capacity and minimum service.""" from xarray import ones_like, zeros_like from muse.commodities import is_enduse - from muse.timeslices import QuantityType, convert_timeslice if "minimum_service_factor" not in technologies.data_vars: return None @@ -756,18 +760,13 @@ def minimum_service( if "region" in search_space.coords and "region" in technologies.dims: kwargs["region"] = assets.region techs = ( - technologies[["fixed_outputs", "utilization_factor", 
"minimum_service_factor"]] + technologies[["fixed_outputs", "minimum_service_factor"]] .sel(**kwargs) .drop_vars("technology") ) - capacity = ( - convert_timeslice( - techs.fixed_outputs, - market.timeslice, - QuantityType.EXTENSIVE, - ) - * techs.minimum_service_factor - ) + capacity = distribute_timeslice( + techs.fixed_outputs, level=timeslice_level + ) * broadcast_timeslice(techs.minimum_service_factor, level=timeslice_level) if "asset" not in capacity.dims: capacity = capacity.expand_dims(asset=search_space.asset) production = ones_like(capacity) @@ -779,7 +778,7 @@ def minimum_service( def lp_costs( - technologies: xr.Dataset, costs: xr.DataArray, timeslices: xr.DataArray + technologies: xr.Dataset, costs: xr.DataArray, timeslice_level: str | None = None ) -> xr.Dataset: """Creates costs for solving with scipy's LP solver. @@ -790,7 +789,6 @@ def lp_costs( >>> from muse import examples >>> technologies = examples.technodata("residential", model="medium") >>> search_space = examples.search_space("residential", model="medium") - >>> timeslices = examples.sector("residential", model="medium").timeslices >>> costs = ( ... search_space ... * np.arange(np.prod(search_space.shape)).reshape(search_space.shape) @@ -801,7 +799,7 @@ def lp_costs( >>> from muse.constraints import lp_costs >>> lpcosts = lp_costs( - ... technologies.sel(year=2020, region="R1"), costs, timeslices + ... technologies.sel(year=2020, region="R1"), costs ... 
) >>> assert "capacity" in lpcosts.data_vars >>> assert "production" in lpcosts.data_vars @@ -831,29 +829,22 @@ def lp_costs( from xarray import zeros_like from muse.commodities import is_enduse - from muse.timeslices import convert_timeslice assert "year" not in technologies.dims - ts_costs = convert_timeslice(costs, timeslices) selection = dict( commodity=is_enduse(technologies.comm_usage), technology=technologies.technology.isin(costs.replacement), ) - if "region" in technologies.fixed_outputs.dims and "region" in ts_costs.coords: - selection["region"] = ts_costs.region + if "region" in technologies.fixed_outputs.dims and "region" in costs.coords: + selection["region"] = costs.region fouts = technologies.fixed_outputs.sel(selection).rename(technology="replacement") - # lpcosts.dims = Frozen({'asset': 2, - # 'replacement': 2, - # 'timeslice': 3, - # 'commodity': 1}) - # muse38: lpcosts.dims = Frozen({'asset': 2, , - # 'commodity': 1 - # 'replacement': 2, - # 'timeslice': 3}) - production = zeros_like(ts_costs * fouts) + production = zeros_like( + broadcast_timeslice(costs, level=timeslice_level) + * distribute_timeslice(fouts, level=timeslice_level) + ) for dim in production.dims: if isinstance(production.get_index(dim), pd.MultiIndex): production = drop_timeslice(production) @@ -862,43 +853,6 @@ def lp_costs( return xr.Dataset(dict(capacity=costs, production=production)) -def merge_lp( - costs: xr.Dataset, *constraints: Constraint -) -> tuple[xr.Dataset, list[Constraint]]: - """Unify coordinate systems of costs and constraints. - - In practice, this function brings costs and constraints into a single xr.Dataset and - then splits things up again. This ensures the dimensions are not only compatible, - but also such that that their order in memory is the same. 
- """ - from xarray import merge - - data = merge( - [costs] - + [ - constraint.rename( - b=f"b{i}", capacity=f"capacity{i}", production=f"production{i}" - ) - for i, constraint in enumerate(constraints) - ] - ) - - unified_costs = cast(xr.Dataset, data[["capacity", "production"]]) - unified_constraints = [ - xr.Dataset( - { - "capacity": data[f"capacity{i}"], - "production": data[f"production{i}"], - "b": data[f"b{i}"], - }, - attrs=constraint.attrs, - ) - for i, constraint in enumerate(constraints) - ] - - return unified_costs, unified_constraints - - def lp_constraint(constraint: Constraint, lpcosts: xr.Dataset) -> Constraint: """Transforms the constraint to LP data. @@ -983,7 +937,6 @@ def lp_constraint_matrix( ... .sel(region=assets.region) ... ), ... costs=search * np.arange(np.prod(search.shape)).reshape(search.shape), - ... timeslices=market.timeslice, ... ) For a simple example, we can first check the case where b is scalar. The result @@ -1103,7 +1056,6 @@ class ScipyAdapter: >>> from muse import examples >>> from muse.quantities import maximum_production - >>> from muse.timeslices import convert_timeslice >>> from muse import constraints as cs >>> res = examples.sector("residential", model="medium") >>> market = examples.residential_market("medium") @@ -1112,7 +1064,6 @@ class ScipyAdapter: >>> market_demand = 0.8 * maximum_production( ... res.technologies.interp(year=2025), ... assets.capacity.sel(year=2025).groupby("technology").sum("asset"), - ... timeslices=market.timeslice, ... ).rename(technology="asset") >>> costs = search * np.arange(np.prod(search.shape)).reshape(search.shape) >>> constraint = cs.max_capacity_expansion( @@ -1147,7 +1098,7 @@ class ScipyAdapter: >>> technologies = res.technologies.interp(year=market.year.min() + 5) >>> inputs = cs.ScipyAdapter.factory( - ... technologies, costs, market.timeslice, constraint + ... technologies, costs, constraint ... 
) The decision variables are always constrained between zero and infinity: @@ -1172,7 +1123,7 @@ class ScipyAdapter: In practice, :py:func:`~muse.constraints.lp_costs` helps us define the decision variables (and ``c``). We can verify that the sizes are consistent: - >>> lpcosts = cs.lp_costs(technologies, costs, market.timeslice) + >>> lpcosts = cs.lp_costs(technologies, costs) >>> capsize = lpcosts.capacity.size >>> prodsize = lpcosts.production.size >>> assert inputs.c.size == capsize + prodsize @@ -1207,10 +1158,10 @@ def factory( cls, technologies: xr.Dataset, costs: xr.DataArray, - timeslices: pd.Index, *constraints: Constraint, + timeslice_level: str | None = None, ) -> ScipyAdapter: - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs, timeslice_level=timeslice_level) data = cls._unified_dataset(technologies, lpcosts, *constraints) diff --git a/src/muse/costs.py b/src/muse/costs.py index 9c95a66bd..4461113eb 100644 --- a/src/muse/costs.py +++ b/src/muse/costs.py @@ -13,7 +13,7 @@ from muse.commodities import is_enduse, is_fuel, is_material, is_pollutant from muse.quantities import consumption -from muse.timeslices import QuantityType, convert_timeslice +from muse.timeslices import broadcast_timeslice, distribute_timeslice from muse.utilities import filter_input @@ -23,6 +23,7 @@ def net_present_value( capacity: xr.DataArray, production: xr.DataArray, year: int, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Net present value (NPV) of the relevant technologies. @@ -51,6 +52,7 @@ def net_present_value( capacity: xr.DataArray with the capacity of the relevant technologies production: xr.DataArray with the production of the relevant technologies year: int, the year of the forecast + timeslice_level: the desired timeslice level of the result (e.g. 
"hour", "day") Return: xr.DataArray with the NPV calculated for the relevant technologies @@ -79,10 +81,13 @@ def net_present_value( years = xr.DataArray(iyears, coords={"year": iyears}, dims="year") # Evolution of rates with time - rates = discount_factor( - years - year + 1, - interest_rate=techs.interest_rate, - mask=years <= year + life, + rates = broadcast_timeslice( + discount_factor( + years - year + 1, + interest_rate=techs.interest_rate, + mask=years <= year + life, + ), + level=timeslice_level, ) # Filters @@ -99,10 +104,8 @@ def net_present_value( raw_revenues = (production * prices_non_env * rates).sum(("commodity", "year")) # Cost of installed capacity - installed_capacity_costs = convert_timeslice( - techs.cap_par * (capacity**techs.cap_exp), - prices.timeslice, - QuantityType.EXTENSIVE, + installed_capacity_costs = distribute_timeslice( + techs.cap_par * (capacity**techs.cap_exp), level=timeslice_level ) # Cost related to environmental products @@ -123,21 +126,24 @@ def net_present_value( # Fixed costs fixed_costs = ( - convert_timeslice( - techs.fix_par * (capacity**techs.fix_exp), - prices.timeslice, - QuantityType.EXTENSIVE, + distribute_timeslice( + techs.fix_par * (capacity**techs.fix_exp), level=timeslice_level ) * rates ).sum("year") # Variable costs - tech_activity = (production.sel(commodity=products) / techs.fixed_outputs).max( - "commodity" - ) - variable_costs = ((techs.var_par * tech_activity**techs.var_exp) * rates).sum( - "year" - ) + tech_activity = ( + production.sel(commodity=products) + / broadcast_timeslice(techs.fixed_outputs, level=timeslice_level) + ).max("commodity") + variable_costs = ( + ( + broadcast_timeslice(techs.var_par, level=timeslice_level) + * tech_activity ** broadcast_timeslice(techs.var_exp, level=timeslice_level) + ) + * rates + ).sum("year") # Net present value result = raw_revenues - ( @@ -187,6 +193,7 @@ def equivalent_annual_cost( capacity: xr.DataArray, production: xr.DataArray, year: int, + 
timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Equivalent annual costs (or annualized cost) of a technology. @@ -204,13 +211,14 @@ def equivalent_annual_cost( capacity: xr.DataArray with the capacity of the relevant technologies production: xr.DataArray with the production of the relevant technologies year: int, the year of the forecast + timeslice_level: the desired timeslice level of the result (e.g. "hour", "day") Return: xr.DataArray with the EAC calculated for the relevant technologies """ npc = net_present_cost(technologies, prices, capacity, production, year) crf = capital_recovery_factor(technologies) - return npc * crf + return npc * broadcast_timeslice(crf, level=timeslice_level) def lifetime_levelized_cost_of_energy( @@ -219,6 +227,7 @@ def lifetime_levelized_cost_of_energy( capacity: xr.DataArray, production: xr.DataArray, year: int, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Levelized cost of energy (LCOE) of technologies over their lifetime. @@ -230,6 +239,7 @@ def lifetime_levelized_cost_of_energy( capacity: xr.DataArray with the capacity of the relevant technologies production: xr.DataArray with the production of the relevant technologies year: int, the year of the forecast + timeslice_level: the desired timeslice level of the result (e.g. 
"hour", "day") Return: xr.DataArray with the LCOE calculated for the relevant technologies @@ -257,10 +267,13 @@ def lifetime_levelized_cost_of_energy( years = xr.DataArray(iyears, coords={"year": iyears}, dims="year") # Evolution of rates with time - rates = discount_factor( - years=years - year + 1, - interest_rate=techs.interest_rate, - mask=years <= year + life, + rates = broadcast_timeslice( + discount_factor( + years=years - year + 1, + interest_rate=techs.interest_rate, + mask=years <= year + life, + ), + level=timeslice_level, ) # Filters @@ -270,13 +283,16 @@ def lifetime_levelized_cost_of_energy( fuels = is_fuel(technologies.comm_usage) # Calculate consumption - cons = consumption(technologies=techs, production=production, prices=prices) + cons = consumption( + technologies=techs, + production=production, + prices=prices, + timeslice_level=timeslice_level, + ) # Cost of installed capacity - installed_capacity_costs = convert_timeslice( - techs.cap_par * (capacity**techs.cap_exp), - prices.timeslice, - QuantityType.EXTENSIVE, + installed_capacity_costs = distribute_timeslice( + techs.cap_par * (capacity**techs.cap_exp), level=timeslice_level ) # Cost related to environmental products @@ -297,21 +313,24 @@ def lifetime_levelized_cost_of_energy( # Fixed costs fixed_costs = ( - convert_timeslice( - techs.fix_par * (capacity**techs.fix_exp), - prices.timeslice, - QuantityType.EXTENSIVE, + distribute_timeslice( + techs.fix_par * (capacity**techs.fix_exp), level=timeslice_level ) * rates ).sum("year") # Variable costs - tech_activity = (production.sel(commodity=products) / techs.fixed_outputs).max( - "commodity" - ) - variable_costs = ((techs.var_par * tech_activity**techs.var_exp) * rates).sum( - "year" - ) + tech_activity = ( + production.sel(commodity=products) + / broadcast_timeslice(techs.fixed_outputs, level=timeslice_level) + ).max("commodity") + variable_costs = ( + ( + broadcast_timeslice(techs.var_par, level=timeslice_level) + * tech_activity ** 
broadcast_timeslice(techs.var_exp, level=timeslice_level) + ) + * rates + ).sum("year") # Production prod = ( @@ -339,6 +358,7 @@ def annual_levelized_cost_of_energy( prices: xr.DataArray, interpolation: str = "linear", fill_value: Union[int, str] = "extrapolate", + timeslice_level: Optional[str] = None, **filters, ) -> xr.DataArray: """Undiscounted levelized cost of energy (LCOE) of technologies on each given year. @@ -366,6 +386,7 @@ def annual_levelized_cost_of_energy( This dataarray contains at least timeslice and commodity dimensions. interpolation: interpolation method. fill_value: Fill value for values outside the extrapolation range. + timeslice_level: the desired timeslice level of the result (e.g. "hour", "day") **filters: Anything by which prices can be filtered. Return: @@ -401,58 +422,34 @@ def annual_levelized_cost_of_energy( rates = techs.interest_rate / (1 - (1 + techs.interest_rate) ** (-life)) # Capital costs - annualized_capital_costs = ( - convert_timeslice( - techs.cap_par * rates, - prices.timeslice, - QuantityType.EXTENSIVE, - ) - / techs.utilization_factor - ) + annualized_capital_costs = distribute_timeslice( + techs.cap_par * rates, level=timeslice_level + ) / broadcast_timeslice(techs.utilization_factor, level=timeslice_level) # Fixed and variable running costs - o_and_e_costs = ( - convert_timeslice( - (techs.fix_par + techs.var_par), - prices.timeslice, - QuantityType.EXTENSIVE, - ) - / techs.utilization_factor - ) + o_and_e_costs = distribute_timeslice( + techs.fix_par + techs.var_par, level=timeslice_level + ) / broadcast_timeslice(techs.utilization_factor, level=timeslice_level) # Fuel costs from fixed and flexible inputs fuel_costs = ( - convert_timeslice(techs.fixed_inputs, prices.timeslice, QuantityType.EXTENSIVE) - * prices + distribute_timeslice(techs.fixed_inputs, level=timeslice_level) * prices ).sum("commodity") fuel_costs += ( - convert_timeslice( - techs.flexible_inputs, prices.timeslice, QuantityType.EXTENSIVE - ) - * 
prices + distribute_timeslice(techs.flexible_inputs, level=timeslice_level) * prices ).sum("commodity") # Environmental costs if "region" in techs.dims: env_costs = ( - ( - convert_timeslice( - techs.fixed_outputs, prices.timeslice, QuantityType.EXTENSIVE - ) - * prices - ) + (distribute_timeslice(techs.fixed_outputs, level=timeslice_level) * prices) .sel(region=techs.region) .sel(commodity=is_pollutant(techs.comm_usage)) .sum("commodity") ) else: env_costs = ( - ( - convert_timeslice( - techs.fixed_outputs, prices.timeslice, QuantityType.EXTENSIVE - ) - * prices - ) + (distribute_timeslice(techs.fixed_outputs, level=timeslice_level) * prices) .sel(commodity=is_pollutant(techs.comm_usage)) .sum("commodity") ) diff --git a/src/muse/data/default_settings.toml b/src/muse/data/default_settings.toml index 9081520fb..35bc1113c 100644 --- a/src/muse/data/default_settings.toml +++ b/src/muse/data/default_settings.toml @@ -70,8 +70,3 @@ summer.weekend.night = 150 summer.weekend.morning = 150 summer.weekend.afternoon = 150 summer.weekend.evening = 150 - -[timeslices.aggregates] -all-day = ["night", "morning", "afternoon", "early-peak", "late-peak", "evening"] -all-week = ["weekday", "weekend"] -all-year = ["winter", "summer", "spring-autumn"] diff --git a/src/muse/data/example/default/settings.toml b/src/muse/data/example/default/settings.toml index 981977384..8c508b8cd 100644 --- a/src/muse/data/example/default/settings.toml +++ b/src/muse/data/example/default/settings.toml @@ -41,7 +41,7 @@ commodities_out = '{path}/technodata/residential/CommOut.csv' [sectors.residential.subsectors.all] agents = '{path}/technodata/Agents.csv' existing_capacity = '{path}/technodata/residential/ExistingCapacity.csv' -lpsolver = "scipy" # Optional, defaults to "adhoc" +lpsolver = "scipy" # Optional, defaults to "scipy" constraints = [ # Optional, defaults to the constraints below "max_production", diff --git a/src/muse/data/example/default_retro/settings.toml 
b/src/muse/data/example/default_retro/settings.toml index 900a1469e..4c9cae789 100644 --- a/src/muse/data/example/default_retro/settings.toml +++ b/src/muse/data/example/default_retro/settings.toml @@ -41,7 +41,7 @@ commodities_out = '{path}/technodata/residential/CommOut.csv' [sectors.residential.subsectors.retro_and_new] agents = '{path}/technodata/Agents.csv' existing_capacity = '{path}/technodata/residential/ExistingCapacity.csv' -lpsolver = "scipy" # Optional, defaults to "adhoc" +lpsolver = "scipy" # Optional, defaults to "scipy" constraints = [ # Optional, defaults to the constraints below "max_production", diff --git a/src/muse/data/example/default_timeslice/output.py b/src/muse/data/example/default_timeslice/output.py deleted file mode 100644 index 68225919d..000000000 --- a/src/muse/data/example/default_timeslice/output.py +++ /dev/null @@ -1,45 +0,0 @@ -from typing import Optional - -import xarray as xr - -from muse.outputs.sector import market_quantity, register_output_quantity - - -@register_output_quantity -def supply_timeslice( - market: xr.Dataset, - capacity: xr.DataArray, - technologies: xr.Dataset, - sum_over: Optional[list[str]] = None, - drop: Optional[list[str]] = None, - rounding: int = 4, -) -> xr.DataArray: - """Current supply.""" - market = market.reset_index("timeslice") - result = ( - market_quantity(market.supply, sum_over=sum_over, drop=drop) - .rename("supply") - .to_dataframe() - .round(rounding) - ) - return result[result.supply != 0] - - -@register_output_quantity -def consumption_timeslice( - market: xr.Dataset, - capacity: xr.DataArray, - technologies: xr.Dataset, - sum_over: Optional[list[str]] = None, - drop: Optional[list[str]] = None, - rounding: int = 4, -) -> xr.DataArray: - """Current consumption.""" - market = market.reset_index("timeslice") - result = ( - market_quantity(market.consumption, sum_over=sum_over, drop=drop) - .rename("consumption") - .to_dataframe() - .round(rounding) - ) - return result[result.consumption 
!= 0] diff --git a/src/muse/data/example/default_timeslice/settings.toml b/src/muse/data/example/default_timeslice/settings.toml index c566622c6..05130bfd0 100644 --- a/src/muse/data/example/default_timeslice/settings.toml +++ b/src/muse/data/example/default_timeslice/settings.toml @@ -41,7 +41,7 @@ commodities_out = '{path}/technodata/residential/CommOut.csv' [sectors.residential.subsectors.all] agents = '{path}/technodata/Agents.csv' existing_capacity = '{path}/technodata/residential/ExistingCapacity.csv' -lpsolver = "scipy" # Optional, defaults to "adhoc" +lpsolver = "scipy" # Optional, defaults to "scipy" constraints = [ # Optional, defaults to the constraints below "max_production", diff --git a/src/muse/decisions.py b/src/muse/decisions.py index 98292c5e7..1ecee7499 100644 --- a/src/muse/decisions.py +++ b/src/muse/decisions.py @@ -117,7 +117,9 @@ def mean(objectives: Dataset, *args, **kwargs) -> DataArray: from xarray import concat allobjectives = concat(objectives.data_vars.values(), dim="concat_var") - return allobjectives.mean(set(allobjectives.dims) - {"asset", "replacement"}) + return allobjectives.mean( + set(allobjectives.dims) - {"asset", "replacement", "timeslice"} + ) @register_decision diff --git a/src/muse/defaults.py b/src/muse/defaults.py index 4249e306d..e8ad0010b 100644 --- a/src/muse/defaults.py +++ b/src/muse/defaults.py @@ -2,12 +2,7 @@ from pathlib import Path -try: - import SGIModelData - - DEFAULT_SECTORS_DIRECTORY = SGIModelData.PATH -except ImportError: - DEFAULT_SECTORS_DIRECTORY = Path().cwd() / "data" +DEFAULT_SECTORS_DIRECTORY = Path().cwd() / "data" DATA_DIRECTORY = Path(__file__).parent / "data" """ Standard data directory.""" diff --git a/src/muse/demand_share.py b/src/muse/demand_share.py index e049f312a..9171a816f 100644 --- a/src/muse/demand_share.py +++ b/src/muse/demand_share.py @@ -63,6 +63,7 @@ def demand_share( RetrofitAgentInStandardDemandShare, ) from muse.registration import registrator +from muse.utilities import 
check_dimensions DEMAND_SHARE_SIGNATURE = Callable[ [Sequence[AbstractAgent], xr.Dataset, xr.Dataset, KwArg(Any)], xr.DataArray @@ -102,7 +103,27 @@ def demand_share( keyword_args = copy(keywords) keyword_args.update(**kwargs) - return function(agents, market, technologies, **keyword_args) + + # Check inputs + check_dimensions( + market, + ["commodity", "year", "timeslice", "region"], + optional=["dst_region"], + ) + check_dimensions( + technologies, + ["technology", "year", "region"], + optional=["timeslice", "commodity", "dst_region"], + ) + + # Calculate demand share + result = function(agents, market, technologies, **keyword_args) + + # Check result + check_dimensions( + result, ["timeslice", "commodity"], optional=["asset", "region"] + ) # TODO: asset should be required, but trade model is failing + return result return cast(DEMAND_SHARE_SIGNATURE, demand_share) @@ -112,9 +133,9 @@ def new_and_retro( agents: Sequence[AbstractAgent], market: xr.Dataset, technologies: xr.Dataset, - production: Union[str, Mapping, Callable] = "maximum_production", - current_year: Optional[int] = None, - forecast: int = 5, + current_year: int, + forecast: int, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: r"""Splits demand across new and retro agents. @@ -131,9 +152,9 @@ def new_and_retro( to the production method. The ``consumption`` reflects the demand for the commodities produced by the current sector. technologies: quantities describing the technologies. - production: Production method current_year: Current year of simulation forecast: How many years to forecast ahead + timeslice_level: the timeslice level of the sector (e.g. "hour", "day") Pseudo-code: @@ -145,7 +166,7 @@ def new_and_retro( A_{a, s}^r = w_s\sum_i A_a^{r, i} with :math:`w_s` a weight associated with each timeslice and determined via - :py:func:`muse.timeslices.convert_timeslice`. + :py:func:`muse.timeslices.distribute_timeslice`. #. 
An intermediate quantity, the :py:func:`unmet demand ` :math:`U` is defined from @@ -161,8 +182,8 @@ def new_and_retro( simplicity. The resulting expression has the same indices as the consumption :math:`\mathcal{C}_{c, s}^r`. - :math:`P` is any function registered with - :py:func:`@register_production`. + :math:`P` is the maximum production, given by + `. #. the *new* demand :math:`N` is defined as: @@ -236,21 +257,18 @@ def decommissioning(capacity): technologies, capacity, year=[current_year, current_year + forecast], - timeslices=market.timeslice, + timeslice_level=timeslice_level, ).squeeze("year") - if current_year is None: - current_year = market.year.min() - capacity = reduce_assets([u.assets.capacity for u in agents]) demands = new_and_retro_demands( capacity, market, technologies, - production=production, current_year=current_year, forecast=forecast, + timeslice_level=timeslice_level, ) demands = demands.where( @@ -314,8 +332,8 @@ def decommissioning(capacity): partial( maximum_production, technologies=regional_techs, - timeslices=market.timeslice, year=current_year, + timeslice_level=timeslice_level, ), id_to_nquantity, ) @@ -331,9 +349,9 @@ def standard_demand( agents: Sequence[AbstractAgent], market: xr.Dataset, technologies: xr.Dataset, - production: Union[str, Mapping, Callable] = "maximum_production", - current_year: Optional[int] = None, - forecast: int = 5, + current_year: int, + forecast: int, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: r"""Splits demand across new agents. @@ -350,9 +368,9 @@ def standard_demand( to the production method. The ``consumption`` reflects the demand for the commodities produced by the current sector. technologies: quantities describing the technologies. - production: Production method current_year: Current year of simulation forecast: How many years to forecast ahead + timeslice_level: the timeslice level of the sector (e.g. 
"hour", "day") """ from functools import partial @@ -368,63 +386,66 @@ def decommissioning(capacity): technologies, capacity, year=[current_year, current_year + forecast], - timeslices=market.timeslice, + timeslice_level=timeslice_level, ).squeeze("year") - if current_year is None: - current_year = market.year.min() + # Make sure there are no retrofit agents + for agent in agents: + if agent.category == "retrofit": + raise RetrofitAgentInStandardDemandShare() + # Calculate existing capacity capacity = reduce_assets([agent.assets.capacity for agent in agents]) + # Calculate new and retrofit demands demands = new_and_retro_demands( capacity, market, technologies, - production=production, current_year=current_year, forecast=forecast, + timeslice_level=timeslice_level, ) + # Only consider end-use commodities demands = demands.where( is_enduse(technologies.comm_usage.sel(commodity=demands.commodity)), 0 ) - for agent in agents: - if agent.category == "retrofit": - raise RetrofitAgentInStandardDemandShare() - id_to_share: MutableMapping[Hashable, xr.DataArray] = {} for region in demands.region.values: + # Calculate current capacity current_capacity: MutableMapping[Hashable, xr.DataArray] = { agent.uuid: agent.assets.capacity for agent in agents if agent.region == region } + + # Split demands between agents id_to_quantity = { agent.uuid: (agent.name, agent.region, agent.quantity) for agent in agents if agent.region == region } - retro_demands: MutableMapping[Hashable, xr.DataArray] = _inner_split( current_capacity, demands.retrofit.sel(region=region), decommissioning, id_to_quantity, ) - new_demands = _inner_split( current_capacity, demands.new.sel(region=region), partial( maximum_production, technologies=technologies.sel(region=region), - timeslices=market.timeslice, year=current_year, + timeslice_level=timeslice_level, ), id_to_quantity, ) + # Sum new and retrofit demands total_demands = { k: new_demands[k] + retro_demands[k] for k in new_demands.keys() } @@ -439,23 
+460,22 @@ def unmet_forecasted_demand( agents: Sequence[AbstractAgent], market: xr.Dataset, technologies: xr.Dataset, - current_year: Optional[int] = None, - production: Union[str, Mapping, Callable] = "maximum_production", - forecast: int = 5, + current_year: int, + forecast: int, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Forecast demand that cannot be serviced by non-decommissioned current assets.""" from muse.commodities import is_enduse from muse.utilities import reduce_assets - if current_year is None: - current_year = market.year.min() - year = current_year + forecast comm_usage = technologies.comm_usage.sel(commodity=market.commodity) smarket: xr.Dataset = market.where(is_enduse(comm_usage), 0).interp(year=year) capacity = reduce_assets([u.assets.capacity.interp(year=year) for u in agents]) capacity = cast(xr.DataArray, capacity) - result = unmet_demand(smarket, capacity, technologies, production) + result = unmet_demand( + smarket, capacity, technologies, timeslice_level=timeslice_level + ) if "year" in result.dims: result = result.squeeze("year") return result @@ -474,6 +494,7 @@ def _inner_split( """ from numpy import logical_and + # Find decrease in capacity production by each asset over time shares: Mapping[Hashable, xr.DataArray] = { key: method(capacity=capacity) .groupby("technology") @@ -481,13 +502,12 @@ def _inner_split( .rename(technology="asset") for key, capacity in assets.items() } + + # Total decrease in production across assets try: summed_shares: xr.DataArray = xr.concat(shares.values(), dim="concat_dim").sum( "concat_dim" ) - - # Calculates the total demand assigned in the previous step with the "method" - # function across agents and assets. 
total: xr.DataArray = summed_shares.sum("asset") except AttributeError: raise AgentWithNoAssetsInDemandShare() @@ -495,7 +515,9 @@ def _inner_split( # Calculates the demand divided by the number of assets times the number of agents # if the demand is bigger than zero and the total demand assigned with the "method" # function is zero. - unassigned = (demand / (len(shares) * len(summed_shares))).where( + n_agents = len(quantity) + n_assets = summed_shares.sizes["asset"] + unassigned = (demand / (n_agents * n_assets)).where( logical_and(demand > 1e-12, total <= 1e-12), 0 ) @@ -515,7 +537,7 @@ def unmet_demand( market: xr.Dataset, capacity: xr.DataArray, technologies: xr.Dataset, - production: Union[str, Mapping, Callable] = "maximum_production", + timeslice_level: Optional[str] = None, ): r"""Share of the demand that cannot be serviced by the existing assets. @@ -527,29 +549,35 @@ def unmet_demand( The resulting expression has the same indices as the consumption :math:`\mathcal{C}_{c, s}^r`. - :math:`P` is any function registered with - :py:func:`@register_production`. + :math:`P` is the maximum production, given by . 
""" - from muse.production import factory as prod_factory + from muse.quantities import maximum_production + + # Calculate maximum production by existing assets + produced = maximum_production( + capacity=capacity, technologies=technologies, timeslice_level=timeslice_level + ) - prod_method = production if callable(production) else prod_factory(production) - assert callable(prod_method) - produced = prod_method(market=market, capacity=capacity, technologies=technologies) + # Total commodity production by summing over assets if "dst_region" in produced.dims: produced = produced.sum("asset").rename(dst_region="region") elif "region" in produced.coords and produced.region.dims: produced = produced.groupby("region").sum("asset") else: produced = produced.sum("asset") - return (market.consumption - produced).clip(min=0) + + # Unmet demand is the difference between the consumption and the production + unmet_demand = (market.consumption - produced).clip(min=0) + return unmet_demand def new_consumption( capacity: xr.DataArray, market: xr.Dataset, technologies: xr.Dataset, - current_year: Optional[int] = None, - forecast: int = 5, + current_year: int, + forecast: int, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: r"""Computes share of the demand attributed to new agents. @@ -563,21 +591,23 @@ def new_consumption( - P[\mathcal{M}(y + \Delta y), \mathcal{A}_{a, s}^r(y)] \right) - Where :math:`P` is a production function taking the market and assets as arguments. + Where :math:`P` the maximum production by existing assets, given by + . 
""" from numpy import minimum - if current_year is None: - current_year = market.year.min() - + # Interpolate capacity to forecast year capa = capacity.interp(year=current_year + forecast) assert isinstance(capa, xr.DataArray) + + # Interpolate market to forecast year market = market.interp(year=[current_year, current_year + forecast]) current = market.sel(year=current_year, drop=True) forecasted = market.sel(year=current_year + forecast, drop=True) + # Calculate the increase in consumption over the forecast period delta = (forecasted.consumption - current.consumption).clip(min=0) - missing = unmet_demand(current, capa, technologies) + missing = unmet_demand(current, capa, technologies, timeslice_level=timeslice_level) consumption = minimum(delta, missing) return consumption @@ -586,9 +616,9 @@ def new_and_retro_demands( capacity: xr.DataArray, market: xr.Dataset, technologies: xr.Dataset, - production: Union[str, Mapping, Callable] = "maximum_production", - current_year: Optional[int] = None, - forecast: int = 5, + current_year: int, + forecast: int, + timeslice_level: Optional[str] = None, ) -> xr.Dataset: """Splits demand into *new* and *retrofit* demand. 
@@ -602,65 +632,53 @@ def new_and_retro_demands( """ from numpy import minimum - from muse.production import factory as prod_factory - - production_method = production if callable(production) else prod_factory(production) - assert callable(production_method) - if current_year is None: - current_year = market.year.min() + from muse.quantities import maximum_production + # Interpolate market to forecast year smarket: xr.Dataset = market.interp(year=[current_year, current_year + forecast]) + + # Interpolate capacity to forecast year capa = capacity.interp(year=[current_year, current_year + forecast]) assert isinstance(capa, xr.DataArray) + if hasattr(capa, "region") and capa.region.dims == (): capa["region"] = "asset", [str(capa.region.values)] * len(capa.asset) + # Calculate demand to allocate to "new" agents new_demand = new_consumption( - capa, smarket, technologies, current_year=current_year, forecast=forecast + capa, + smarket, + technologies, + current_year=current_year, + forecast=forecast, + timeslice_level=timeslice_level, ) if "year" in new_demand.dims: new_demand = new_demand.squeeze("year") + # Maximum production in the forecast year by existing assets service = ( - production_method( - smarket.sel(year=current_year + forecast), - capa.sel(year=current_year + forecast), + maximum_production( technologies, + capa.sel(year=current_year + forecast), + timeslice_level=timeslice_level, ) .groupby("region") .sum("asset") ) - # existing asset should not execute beyond demand + + # Existing asset should not execute beyond demand service = minimum( service, smarket.consumption.sel(year=current_year + forecast, drop=True) ) + + # Leftover demand that cannot be serviced by existing assets or "new" agents retro_demand = ( smarket.consumption.sel(year=current_year + forecast, drop=True) - new_demand - service ).clip(min=0) - if "year" in retro_demand.dims: retro_demand = retro_demand.squeeze("year") return xr.Dataset({"new": new_demand, "retrofit": retro_demand}) - - 
-def new_demand( - capacity: xr.DataArray, - market: xr.Dataset, - technologies: xr.Dataset, - production: Union[str, Mapping, Callable] = "maximum_production", - current_year: Optional[int] = None, - forecast: int = 5, -) -> xr.DataArray: - """Calculates the new demand that needs to be covered. - - It groups the demand related to an increase in consumption as well as the existing - demand associated with decommissoned assets. Internally, it just calls - `new_and_retro` demands and adds together both components. - """ - demand = new_and_retro_demands( - capacity, market, technologies, production, current_year, forecast - ) - return (demand["new"] + demand["retrofit"]).rename("demand") diff --git a/src/muse/examples.py b/src/muse/examples.py index 9aa4fa9b6..fb81e0647 100644 --- a/src/muse/examples.py +++ b/src/muse/examples.py @@ -203,7 +203,6 @@ def mca_market(model: str = "default") -> xr.Dataset: base_year_import=getattr( settings.global_input_files, "base_year_import", None ), - timeslices=settings.timeslices, ) .sel(region=settings.regions) .interp(year=settings.time_framework, method=settings.interpolation_mode) @@ -263,9 +262,7 @@ def matching_market(sector: str, model: str = "default") -> xr.Dataset: market = xr.Dataset() production = cast( xr.DataArray, - maximum_production( - loaded_sector.technologies, assets.capacity, loaded_sector.timeslices - ), + maximum_production(loaded_sector.technologies, assets.capacity), ) market["supply"] = production.sum("asset") if "dst_region" in market.dims: @@ -318,10 +315,6 @@ def _copy_default_timeslice(path: Path): example_data_dir() / "default_timeslice" / "settings.toml", path / "settings.toml", ) - copyfile( - example_data_dir() / "default_timeslice" / "output.py", - path / "output.py", - ) def _copy_multiple_agents(path: Path): diff --git a/src/muse/investments.py b/src/muse/investments.py index a95cec19d..9aee3de84 100644 --- a/src/muse/investments.py +++ b/src/muse/investments.py @@ -64,6 +64,7 @@ def investment( 
from muse.errors import GrowthOfCapacityTooConstrained from muse.outputs.cache import cache_quantity from muse.registration import registrator +from muse.timeslices import timeslice_max INVESTMENT_SIGNATURE = Callable[ [xr.DataArray, xr.DataArray, xr.Dataset, list[Constraint], KwArg(Any)], @@ -121,25 +122,6 @@ def factory(settings: Optional[Union[str, Mapping]] = None) -> Callable: name = settings["name"] params = {k: v for k, v in settings.items() if k != "name"} - top = params.get("timeslice_op", "max") - if isinstance(top, str): - if top.lower() == "max": - - def timeslice_op(x: xr.DataArray) -> xr.DataArray: - from muse.timeslices import convert_timeslice - - return (x / convert_timeslice(xr.DataArray(1), x)).max("timeslice") - - elif top.lower() == "sum": - - def timeslice_op(x: xr.DataArray) -> xr.DataArray: - return x.sum("timeslice") - - else: - raise ValueError(f"Unknown timeslice transform {top}") - - params["timeslice_op"] = timeslice_op - investment = INVESTMENTS[name] def compute_investment( @@ -154,6 +136,7 @@ def compute_investment( """ from numpy import zeros + # Skip the investment step if no assets or replacements are available if any(u == 0 for u in search.decision.shape): return xr.DataArray( zeros((len(search.asset), len(search.replacement))), @@ -161,6 +144,7 @@ def compute_investment( dims=("asset", "replacement"), ) + # Otherwise, compute the investment return investment( search.decision, search.search_space, @@ -175,8 +159,7 @@ def compute_investment( def cliff_retirement_profile( technical_life: xr.DataArray, - current_year: int = 0, - protected: int = 0, + investment_year: int, interpolation: str = "linear", **kwargs, ) -> xr.DataArray: @@ -186,19 +169,13 @@ def cliff_retirement_profile( Assets with a technical life smaller than the input time-period should automatically be renewed. 
- Hence, if ``technical_life <= protected``, then effectively, the technical life is - rewritten as ``technical_life * n`` with ``n = int(protected // technical_life) + - 1``. - We could just return an array where each year is represented. Instead, to save memory, we return a compact view of the same where years where no change happens are removed. Arguments: technical_life: lifetimes for each technology - current_year: current year - protected: The technologies are assumed to be renewed between years - `current_year` and `current_year + protected` + investment_year: The year in which the investment is made interpolation: Interpolation type **kwargs: arguments by which to filter technical_life, if any. @@ -211,26 +188,26 @@ def cliff_retirement_profile( if kwargs: technical_life = technical_life.sel(**kwargs) if "year" in technical_life.dims: - technical_life = technical_life.interp(year=current_year, method=interpolation) - technical_life = (1 + protected // technical_life) * technical_life # type:ignore + technical_life = technical_life.interp( + year=investment_year, method=interpolation + ) + # Create profile across all years if len(technical_life) > 0: - max_year = int(current_year + technical_life.max()) + max_year = int(investment_year + technical_life.max()) else: - max_year = int(current_year + protected) + max_year = investment_year allyears = xr.DataArray( - range(current_year, max_year + 1), + range(investment_year, max_year + 1), dims="year", - coords={"year": range(current_year, max_year + 1)}, + coords={"year": range(investment_year, max_year + 1)}, ) + profile = allyears < (investment_year + technical_life) # type: ignore - profile = allyears < (current_year + technical_life) # type: ignore - - # now we minimize the number of years needed to represent the profile fully - # this is done by removing the central year of any three repeating year, ensuring - # the removed year can be recovered by a linear interpolation. 
+ # Minimize the number of years needed to represent the profile fully + # This is done by removing the central year of any three repeating years, ensuring + # the removed year can be recovered by linear interpolation. goodyears = avoid_repetitions(profile.astype(int)) - return profile.sel(year=goodyears).astype(bool) @@ -248,7 +225,7 @@ def adhoc_match_demand( technologies: xr.Dataset, constraints: list[Constraint], year: int, - timeslice_op: Optional[Callable[[xr.DataArray], xr.DataArray]] = None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: from muse.demand_matching import demand_matching from muse.quantities import capacity_in_use, maximum_production @@ -259,18 +236,14 @@ def adhoc_match_demand( max_prod = maximum_production( technologies, max_capacity, - timeslices=demand, year=year, technology=costs.replacement, commodity=demand.commodity, + timeslice_level=timeslice_level, ).drop_vars("technology") # Push disabled techs to last rank. # Any production assigned to them by the demand-matching algorithm will be removed. 
- - if "timeslice" in costs.dims and timeslice_op is not None: - costs = costs.mean("timeslice").mean("asset") # timeslice_op(costs) - minobj = costs.min() maxobj = costs.where(search_space, minobj).max("replacement") + 1 @@ -283,10 +256,14 @@ def adhoc_match_demand( ).where(search_space, 0) capacity = capacity_in_use( - production, technologies, year=year, technology=production.replacement + production, + technologies, + year=year, + technology=production.replacement, + timeslice_level=timeslice_level, ).drop_vars("technology") - if "timeslice" in capacity.dims and timeslice_op is not None: - capacity = timeslice_op(capacity) + if "timeslice" in capacity.dims: + capacity = timeslice_max(capacity) result = xr.Dataset({"capacity": capacity, "production": production}) return result @@ -299,7 +276,7 @@ def scipy_match_demand( technologies: xr.Dataset, constraints: list[Constraint], year: Optional[int] = None, - timeslice_op: Optional[Callable[[xr.DataArray], xr.DataArray]] = None, + timeslice_level: Optional[str] = None, **options, ) -> xr.DataArray: from logging import getLogger @@ -308,20 +285,24 @@ def scipy_match_demand( from muse.constraints import ScipyAdapter - if "timeslice" in costs.dims and timeslice_op is not None: - costs = timeslice_op(costs) + if "timeslice" in costs.dims: + costs = timeslice_max(costs) + + # Select technodata for the current year if "year" in technologies.dims and year is None: raise ValueError("Missing year argument") elif "year" in technologies.dims: techs = technologies.sel(year=year).drop_vars("year") else: techs = technologies - timeslice = next(cs.timeslice for cs in constraints if "timeslice" in cs.dims) + # Run scipy optimization with highs solver adapter = ScipyAdapter.factory( - techs, cast(np.ndarray, costs), timeslice, *constraints + techs, cast(np.ndarray, costs), *constraints, timeslice_level=timeslice_level ) res = linprog(**adapter.kwargs, method="highs") + + # Backup: try with highs-ipm if not res.success and 
(res.status != 0): res = linprog( **adapter.kwargs, @@ -343,7 +324,9 @@ def scipy_match_demand( getLogger(__name__).critical(msg) raise GrowthOfCapacityTooConstrained - return cast(Callable[[np.ndarray], xr.Dataset], adapter.to_muse)(res.x) + # Convert results to a MUSE friendly format + result = cast(Callable[[np.ndarray], xr.Dataset], adapter.to_muse)(res.x) + return result @register_investment(name=["cvxopt"]) @@ -353,7 +336,6 @@ def cvxopt_match_demand( technologies: xr.Dataset, constraints: list[Constraint], year: Optional[int] = None, - timeslice_op: Optional[Callable[[xr.DataArray], xr.DataArray]] = None, **options, ) -> xr.DataArray: from importlib import import_module @@ -369,9 +351,7 @@ def cvxopt_match_demand( techs = technologies def default_to_scipy(): - return scipy_match_demand( - costs, search_space, techs, constraints, timeslice_op=timeslice_op - ) + return scipy_match_demand(costs, search_space, techs, constraints) try: cvxopt = import_module("cvxopt") @@ -384,8 +364,8 @@ def default_to_scipy(): getLogger(__name__).critical(msg) return default_to_scipy() - if "timeslice" in costs.dims and timeslice_op is not None: - costs = timeslice_op(costs) + if "timeslice" in costs.dims: + costs = timeslice_max(costs) timeslice = next(cs.timeslice for cs in constraints if "timeslice" in cs.dims) adapter = ScipyAdapter.factory( techs, -cast(np.ndarray, costs), timeslice, *constraints diff --git a/src/muse/mca.py b/src/muse/mca.py index 0a581df3f..2bc43da1d 100644 --- a/src/muse/mca.py +++ b/src/muse/mca.py @@ -14,7 +14,7 @@ from muse.outputs.cache import OutputCache from muse.readers import read_initial_market from muse.sectors import SECTORS_REGISTERED, AbstractSector, Sector -from muse.timeslices import drop_timeslice +from muse.timeslices import broadcast_timeslice, drop_timeslice from muse.utilities import future_propagation @@ -57,7 +57,6 @@ def factory(cls, settings: str | Path | Mapping | Any) -> MCA: base_year_import=getattr( 
settings.global_input_files, "base_year_import", None ), - timeslices=settings.timeslices, ).sel(region=settings.regions) ).interp(year=settings.time_framework, method=settings.interpolation_mode) @@ -270,23 +269,14 @@ def run(self) -> None: """ from logging import getLogger - from numpy import where from xarray import DataArray - _, self.sectors, hist_years = self.calibrate_legacy_sectors() - if len(hist_years) > 0: - hist = where(self.time_framework <= hist_years[-1])[0] - start = hist[-1] - - else: - start = -1 - nyear = len(self.time_framework) - 1 check_carbon_budget = len(self.carbon_budget) and len(self.carbon_commodities) shoots = self.control_undershoot or self.control_overshoot variables = ["supply", "consumption", "prices"] - for year_idx in range(start + 1, nyear): + for year_idx in range(nyear): years = self.time_framework[year_idx : year_idx + 2] getLogger(__name__).info(f"Running simulation year {years[0]}...") new_market = self.market[variables].sel(year=years) @@ -301,11 +291,12 @@ def run(self) -> None: new_market.prices.loc[dict(commodity=self.carbon_commodities)] = ( future_propagation( new_market.prices.sel(commodity=self.carbon_commodities), - future_price, + broadcast_timeslice(future_price), ) ) self.carbon_price = future_propagation(self.carbon_price, future_price) + # Solve the market _, new_market, self.sectors = self.find_equilibrium(new_market) # Save sector outputs @@ -320,78 +311,23 @@ def run(self) -> None: new_market, year_idx ) + # Update the market dims = {i: new_market[i] for i in new_market.dims} self.market.supply.loc[dims] = new_market.supply self.market.consumption.loc[dims] = new_market.consumption - dims = {i: new_market[i] for i in new_market.prices.dims if i != "year"} self.market.prices.loc[dims] = future_propagation( self.market.prices.sel(dims), new_market.prices.sel(year=years[1]) ) + # Global outputs self.outputs(self.market, self.sectors, year=self.time_framework[year_idx]) # type: ignore 
self.outputs_cache.consolidate_cache(year=self.time_framework[year_idx]) + getLogger(__name__).info( f"Finish simulation year {years[0]} ({year_idx+1}/{nyear})!" ) - def calibrate_legacy_sectors(self): - """Run a calibration step in the legacy sectors. - - Run historical years. - """ - from copy import deepcopy - from logging import getLogger - - from numpy import where - - hist_years = [] - if len([s for s in self.sectors if "LegacySector" in str(type(s))]) == 0: - return None, self.sectors, hist_years - - sectors = [] - idx = [] - for i, s in enumerate(self.sectors): - if "LegacySector" in str(type(s)): - s.mode = "Calibration" - sectors.append(s) - idx.append(i) - - getLogger(__name__).info("Calibrating LegacySectors...") - - if 2015 in self.time_framework: - hist_years = self.time_framework[where(self.time_framework <= 2015)] - hist = len(hist_years) - for year_idx in range(hist): # range(nyear): - years = self.time_framework[year_idx : year_idx + 1] - sectors = deepcopy(sectors) - variables = ["supply", "consumption", "prices"] - new_market = self.market[variables].sel(year=years).copy(deep=True) - for sector in sectors: - sector_market = sector.next( - new_market[["supply", "consumption", "prices"]] # type:ignore - ) - - sector_market = sector_market.sel(year=new_market.year) - - dims = {i: sector_market[i] for i in sector_market.consumption.dims} - - sector_market.consumption.loc[dims] = ( - sector_market.consumption.loc[dims] - sector_market.supply.loc[dims] - ).clip(min=0.0, max=None) - new_market.consumption.loc[dims] += sector_market.consumption - - dims = {i: sector_market[i] for i in sector_market.supply.dims} - new_market.supply.loc[dims] += sector_market.supply - - for i, s in enumerate(sectors): - s.mode = "Iteration" - self.sectors[idx[i]] = s - - getLogger(__name__).info("Finish calibration of LegacySectors!") - - return None, self.sectors, hist_years - class SingleYearIterationResult(NamedTuple): """Result of iterating over sectors for a year. 
@@ -425,29 +361,26 @@ def single_year_iteration( if "updated_prices" not in market.data_vars: market["updated_prices"] = drop_timeslice(market.prices.copy()) - # eventually, the first market should be one that creates the initial demand for sector in sectors: + # Solve the sector sector_market = sector.next( market[["supply", "consumption", "prices"]] # type:ignore ) - sector_market = sector_market.sel(year=market.year) + # Calculate net consumption dims = {i: sector_market[i] for i in sector_market.consumption.dims} - sector_market.consumption.loc[dims] = ( sector_market.consumption.loc[dims] - sector_market.supply.loc[dims] ).clip(min=0.0, max=None) + # Update market supply and consumption market.consumption.loc[dims] += sector_market.consumption - dims = {i: sector_market[i] for i in sector_market.supply.dims} market.supply.loc[dims] += sector_market.supply + # Update market prices costs = sector_market.costs.sel(commodity=is_enduse(sector_market.comm_usage)) - - # do not write costs lower than 1e-4 - # should correspond to rounding value if len(costs.commodity) > 0: costs = costs.where(costs > 1e-4, 0) dims = {i: costs[i] for i in costs.dims} @@ -566,7 +499,9 @@ def check_demand_fulfillment(market: Dataset, tol: float) -> bool: from logging import getLogger future = market.year[-1].item() - delta = (market.supply - market.consumption).sel(year=future) + delta = (market.supply.sum("timeslice") - market.consumption.sum("timeslice")).sel( + year=future + ) unmet = (delta < tol).any([u for u in delta.dims if u != "commodity"]) if unmet.any(): diff --git a/src/muse/objectives.py b/src/muse/objectives.py index 207c9e499..08a705f87 100644 --- a/src/muse/objectives.py +++ b/src/muse/objectives.py @@ -42,9 +42,8 @@ def comfort( these parameters. Returns: - A DataArray with at least one dimension corresponding to ``replacement``. - Other dimensions can be present, as long as the subsequent decision function knows - how to reduce them. 
+ A DataArray with at least two dimensions corresponding to `replacement` and `asset`. + A `timeslice` dimension may also be present. """ __all__ = [ @@ -63,7 +62,7 @@ def comfort( ] from collections.abc import Mapping, MutableMapping, Sequence -from typing import Any, Callable, Union +from typing import Any, Callable, Optional, Union import numpy as np import xarray as xr @@ -71,8 +70,8 @@ def comfort( from muse.outputs.cache import cache_quantity from muse.registration import registrator -from muse.timeslices import drop_timeslice +from muse.timeslices import broadcast_timeslice, distribute_timeslice, drop_timeslice -from muse.utilities import filter_input +from muse.utilities import check_dimensions, filter_input OBJECTIVE_SIGNATURE = Callable[ [xr.Dataset, xr.DataArray, xr.DataArray, KwArg(Any)], xr.DataArray @@ -130,15 +129,23 @@ def objectives( technologies: xr.Dataset, demand: xr.DataArray, prices: xr.DataArray, + timeslice_level: Optional[str] = None, *args, **kwargs, ) -> xr.Dataset: result = xr.Dataset() for name, objective in functions: obj = objective( - technologies=technologies, demand=demand, prices=prices, *args, **kwargs + technologies=technologies, + demand=demand, + prices=prices, + timeslice_level=timeslice_level, + *args, + **kwargs, ) - if "timeslice" in obj.dims and "timeslice" in result.dims: + if "timeslice" not in obj.dims: + obj = broadcast_timeslice(obj, level=timeslice_level) + if "timeslice" in result.dims: obj = drop_timeslice(obj) result[name] = obj return result @@ -160,25 +167,30 @@ def register_objective(function: OBJECTIVE_SIGNATURE): from functools import wraps @wraps(function) - def decorated_objective(technologies: xr.Dataset, *args, **kwargs) -> xr.DataArray: + def decorated_objective( + technologies: xr.Dataset, demand: xr.DataArray, *args, **kwargs + ) -> xr.DataArray: from logging import getLogger - result = function(technologies, *args, **kwargs) + # Check inputs + check_dimensions( + demand, ["asset", "timeslice", 
"commodity"], optional=["region"] + ) + check_dimensions( + technologies, ["replacement", "commodity"], optional=["timeslice"] + ) + # Calculate objective + result = function(technologies, demand, *args, **kwargs) + result.name = function.__name__ + + # Check result dtype = result.values.dtype if not (np.issubdtype(dtype, np.number) or np.issubdtype(dtype, np.bool_)): msg = f"dtype of objective {function.__name__} is not a number ({dtype})" getLogger(function.__module__).warning(msg) + check_dimensions(result, ["replacement", "asset"], optional=["timeslice"]) - if "replacement" not in result.dims: - raise RuntimeError("Objective should return a dimension 'replacement'") - if "technology" in result.dims: - raise RuntimeError("Objective should not return a dimension 'technology'") - if "technology" in result.coords: - raise RuntimeError("Objective should not return a coordinate 'technology'") - if "year" in result.dims: - raise RuntimeError("Objective should not return a dimension 'year'") - result.name = function.__name__ cache_quantity(**{result.name: result}) return result @@ -188,21 +200,25 @@ def decorated_objective(technologies: xr.Dataset, *args, **kwargs) -> xr.DataArr @register_objective def comfort( technologies: xr.Dataset, + demand: xr.DataArray, *args, **kwargs, ) -> xr.DataArray: """Comfort value provided by technologies.""" - return technologies.comfort + result = xr.broadcast(technologies.comfort, demand.asset)[0] + return result @register_objective def efficiency( technologies: xr.Dataset, + demand: xr.DataArray, *args, **kwargs, ) -> xr.DataArray: """Efficiency of the technologies.""" - return technologies.efficiency + result = xr.broadcast(technologies.efficiency, demand.asset)[0] + return result @register_objective(name="capacity") @@ -214,12 +230,8 @@ def capacity_to_service_demand( ) -> xr.DataArray: """Minimum capacity required to fulfill the demand.""" from muse.quantities import capacity_to_service_demand - from muse.timeslices import 
represent_hours - hours = represent_hours(demand.timeslice) - return capacity_to_service_demand( - demand=demand, technologies=technologies, hours=hours - ) + return capacity_to_service_demand(demand=demand, technologies=technologies) @register_objective @@ -230,13 +242,12 @@ def capacity_in_use( **kwargs, ): from muse.commodities import is_enduse - from muse.timeslices import represent_hours + from muse.timeslices import TIMESLICE - hours = represent_hours(demand.timeslice) enduses = is_enduse(technologies.comm_usage.sel(commodity=demand.commodity)) return ( - (demand.sel(commodity=enduses).sum("commodity") / hours).sum("timeslice") - * hours.sum() + (demand.sel(commodity=enduses).sum("commodity") / TIMESLICE).sum("timeslice") + * TIMESLICE.sum() / technologies.utilization_factor ) @@ -279,7 +290,9 @@ def fixed_costs( :math:`\alpha` and :math:`\beta` are "fix_par" and "fix_exp" in :ref:`inputs-technodata`, respectively. """ - capacity = capacity_to_service_demand(technologies, demand) + from muse.quantities import capacity_to_service_demand + + capacity = capacity_to_service_demand(technologies=technologies, demand=demand) result = technologies.fix_par * (capacity**technologies.fix_exp) return result @@ -287,6 +300,7 @@ def fixed_costs( @register_objective def capital_costs( technologies: xr.Dataset, + demand: xr.Dataset, *args, **kwargs, ) -> xr.DataArray: @@ -298,6 +312,7 @@ def capital_costs( simulation for each technology. """ result = technologies.cap_par * (technologies.scaling_size**technologies.cap_exp) + result = xr.broadcast(result, demand.asset)[0] return result @@ -322,18 +337,14 @@ def emission_cost( with :math:`s` the timeslices and :math:`c` the commodity. 
""" from muse.commodities import is_enduse, is_pollutant - from muse.timeslices import QuantityType, convert_timeslice enduses = is_enduse(technologies.comm_usage.sel(commodity=demand.commodity)) total = demand.sel(commodity=enduses).sum("commodity") envs = is_pollutant(technologies.comm_usage) prices = filter_input(prices, year=demand.year.item(), commodity=envs) - return total * ( - convert_timeslice( - technologies.fixed_outputs, prices.timeslice, QuantityType.EXTENSIVE - ) - * prices - ).sum("commodity") + return total * (distribute_timeslice(technologies.fixed_outputs) * prices).sum( + "commodity" + ) @register_objective @@ -372,10 +383,12 @@ def annual_levelized_cost_of_energy( """ from muse.costs import annual_levelized_cost_of_energy as aLCOE - return filter_input( + result = filter_input( aLCOE(technologies=technologies, prices=prices).max("timeslice"), year=demand.year.item(), ) + result = xr.broadcast(result, demand.asset)[0] + return result @register_objective(name=["LCOE", "LLCOE"]) @@ -383,6 +396,7 @@ def lifetime_levelized_cost_of_energy( technologies: xr.Dataset, demand: xr.DataArray, prices: xr.DataArray, + timeslice_level: Optional[str] = None, *args, **kwargs, ): @@ -394,15 +408,15 @@ def lifetime_levelized_cost_of_energy( due to a zero utilisation factor. 
""" from muse.costs import lifetime_levelized_cost_of_energy as LCOE - from muse.timeslices import QuantityType, convert_timeslice + from muse.quantities import capacity_to_service_demand - capacity = capacity_to_service_demand(technologies, demand) + capacity = capacity_to_service_demand( + technologies=technologies, demand=demand, timeslice_level=timeslice_level + ) production = ( - capacity - * convert_timeslice( - technologies.fixed_outputs, demand.timeslice, QuantityType.EXTENSIVE - ) - * technologies.utilization_factor + broadcast_timeslice(capacity, level=timeslice_level) + * distribute_timeslice(technologies.fixed_outputs, level=timeslice_level) + * broadcast_timeslice(technologies.utilization_factor, level=timeslice_level) ) results = LCOE( @@ -411,6 +425,7 @@ def lifetime_levelized_cost_of_energy( capacity=capacity, production=production, year=demand.year.item(), + timeslice_level=timeslice_level, ) return results.where(np.isfinite(results)).fillna(0.0) @@ -421,6 +436,7 @@ def net_present_value( technologies: xr.Dataset, demand: xr.DataArray, prices: xr.DataArray, + timeslice_level: Optional[str] = None, *args, **kwargs, ): @@ -429,15 +445,13 @@ def net_present_value( See :py:func:`muse.costs.net_present_value` for more details. 
""" from muse.costs import net_present_value as NPV - from muse.timeslices import QuantityType, convert_timeslice + from muse.quantities import capacity_to_service_demand - capacity = capacity_to_service_demand(technologies, demand) + capacity = capacity_to_service_demand(technologies=technologies, demand=demand) production = ( - capacity - * convert_timeslice( - technologies.fixed_outputs, demand.timeslice, QuantityType.EXTENSIVE - ) - * technologies.utilization_factor + broadcast_timeslice(capacity, level=timeslice_level) + * distribute_timeslice(technologies.fixed_outputs, level=timeslice_level) + * broadcast_timeslice(technologies.utilization_factor, level=timeslice_level) ) results = NPV( @@ -446,6 +460,7 @@ def net_present_value( capacity=capacity, production=production, year=demand.year.item(), + timeslice_level=timeslice_level, ) return results @@ -455,6 +470,7 @@ def net_present_cost( technologies: xr.Dataset, demand: xr.DataArray, prices: xr.DataArray, + timeslice_level: Optional[str] = None, *args, **kwargs, ): @@ -463,15 +479,13 @@ def net_present_cost( See :py:func:`muse.costs.net_present_cost` for more details. 
""" from muse.costs import net_present_cost as NPC - from muse.timeslices import QuantityType, convert_timeslice + from muse.quantities import capacity_to_service_demand - capacity = capacity_to_service_demand(technologies, demand) + capacity = capacity_to_service_demand(technologies=technologies, demand=demand) production = ( - capacity - * convert_timeslice( - technologies.fixed_outputs, demand.timeslice, QuantityType.EXTENSIVE - ) - * technologies.utilization_factor + broadcast_timeslice(capacity, level=timeslice_level) + * distribute_timeslice(technologies.fixed_outputs, level=timeslice_level) + * broadcast_timeslice(technologies.utilization_factor, level=timeslice_level) ) results = NPC( @@ -489,6 +503,7 @@ def equivalent_annual_cost( technologies: xr.Dataset, demand: xr.DataArray, prices: xr.DataArray, + timeslice_level: Optional[str] = None, *args, **kwargs, ): @@ -497,15 +512,13 @@ def equivalent_annual_cost( See :py:func:`muse.costs.equivalent_annual_cost` for more details. """ from muse.costs import equivalent_annual_cost as EAC - from muse.timeslices import QuantityType, convert_timeslice + from muse.quantities import capacity_to_service_demand - capacity = capacity_to_service_demand(technologies, demand) + capacity = capacity_to_service_demand(technologies=technologies, demand=demand) production = ( - capacity - * convert_timeslice( - technologies.fixed_outputs, demand.timeslice, QuantityType.EXTENSIVE - ) - * technologies.utilization_factor + broadcast_timeslice(capacity, level=timeslice_level) + * distribute_timeslice(technologies.fixed_outputs, level=timeslice_level) + * broadcast_timeslice(technologies.utilization_factor, level=timeslice_level) ) results = EAC( @@ -514,5 +527,6 @@ def equivalent_annual_cost( capacity=capacity, production=production, year=demand.year.item(), + timeslice_level=timeslice_level, ) return results diff --git a/src/muse/outputs/mca.py b/src/muse/outputs/mca.py index 10b0ae474..7d9284daf 100644 --- a/src/muse/outputs/mca.py 
+++ b/src/muse/outputs/mca.py @@ -16,7 +16,7 @@ def quantity( or an xarray xr.DataArray. """ -from collections.abc import Hashable, Iterable, Mapping, MutableMapping, Sequence +from collections.abc import Mapping, MutableMapping from operator import attrgetter from pathlib import Path from typing import ( @@ -35,7 +35,7 @@ def quantity( from muse.outputs.sector import market_quantity from muse.registration import registrator from muse.sectors import AbstractSector -from muse.timeslices import QuantityType, convert_timeslice, drop_timeslice +from muse.timeslices import distribute_timeslice from muse.utilities import multiindex_to_coords OUTPUT_QUANTITY_SIGNATURE = Callable[ @@ -233,463 +233,6 @@ def _aggregate_sectors( return pd.concat(alldata, sort=True) -@register_output_quantity -class AggregateResources: - """Aggregates a set of commodities.""" - - def __init__( - self, - commodities: Union[str, Iterable[Hashable]] = (), - metric: str = "consumption", - ): - if isinstance(commodities, str): - commodities = [commodities] - else: - commodities = list(commodities) - self.commodities: Sequence[Hashable] = commodities - self.metric = metric - self.aggregate: Optional[xr.DataArray] = None - - def __call__( - self, - market: xr.Dataset, - sectors: list[AbstractSector], - year: Optional[int] = None, - ) -> Optional[xr.DataArray]: - if len(self.commodities) == 0: - return None - if year is None: - year = int(market.year.min()) - quantity = cast(xr.DataArray, market[self.metric]).sel( - year=year, commodity=self.commodities, drop=True - ) - if self.aggregate is None: - self.aggregate = quantity - else: - self.aggregate += quantity - return self.aggregate - - -@register_output_quantity(name=["finite_resources"]) -class FiniteResources(AggregateResources): - """Aggregates a set of commodities.""" - - def __init__( - self, - limits_path: Union[str, Path, xr.DataArray], - commodities: Union[str, Iterable[Hashable]] = (), - metric: str = "consumption", - ): - from 
muse.readers.csv import read_finite_resources - - super().__init__(commodities=commodities, metric=metric) - if isinstance(limits_path, str): - limits_path = Path(limits_path) - if isinstance(limits_path, Path): - limits_path = read_finite_resources(limits_path) - - self.limits = limits_path - - def __call__( - self, - market: xr.Dataset, - sectors: list[AbstractSector], - year: Optional[int] = None, - ) -> Optional[xr.DataArray]: - if len(self.commodities) == 0: - return None - if year is None: - year = int(market.year.min()) - - limits = self.limits - if "year" in self.limits.dims: - limits = limits.interp(year=year) - - aggregate = super().__call__(market, sectors, year=year) - if aggregate is None: - return None - aggregate = aggregate.sum([u for u in aggregate.dims if u not in limits.dims]) - assert aggregate is not None - limits = limits.sum([u for u in limits.dims if u not in aggregate.dims]) - return aggregate <= limits.assign_coords(timeslice=aggregate.timeslice) - - -@register_output_quantity(name=["timeslice_supply"]) -def metric_supply( - market: xr.Dataset, sectors: list[AbstractSector], **kwargs -) -> pd.DataFrame: - """Current timeslice supply across all sectors.""" - market_out = market.copy(deep=True) - return _aggregate_sectors(sectors, market_out, op=sector_supply) - - -def sector_supply(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.DataFrame: - """Sector supply with agent annotations.""" - from muse.production import supply - - data_sector: list[xr.DataArray] = [] - techs = getattr(sector, "technologies", []) - agents = sorted(getattr(sector, "agents", []), key=attrgetter("name")) - - if len(techs) > 0: - for a in agents: - output_year = a.year - a.forecast - capacity = a.filter_input(a.assets.capacity, year=output_year).fillna(0.0) - technologies = a.filter_input(techs, year=output_year).fillna(0.0) - agent_market = market.sel(year=output_year).copy() - agent_market["consumption"] = drop_timeslice( - agent_market.consumption * 
a.quantity - ) - included = [ - i - for i in agent_market["commodity"].values - if i in technologies.enduse.values - ] - excluded = [ - i for i in agent_market["commodity"].values if i not in included - ] - agent_market.loc[dict(commodity=excluded)] = 0 - - result = convert_timeslice( - supply( - agent_market, - capacity, - technologies, - ), - agent_market["consumption"].timeslice, - QuantityType.EXTENSIVE, - ) - - if "year" in result.dims: - data_agent = result.sel(year=output_year) - else: - data_agent = result - data_agent["year"] = output_year - if "dst_region" not in data_agent.coords: - data_agent["dst_region"] = a.region - data_agent["agent"] = a.name - data_agent["category"] = a.category - data_agent["sector"] = getattr(sector, "name", "unnamed") - - a = multiindex_to_coords(data_agent, "timeslice").to_dataframe("supply") - a["comm_usage"] = a["comm_usage"].apply(lambda x: x.name) - if not a.empty: - data_sector.append(a[a["supply"] != 0]) - - if len(data_sector) > 0: - output = pd.concat(data_sector, sort=True).reset_index() - else: - output = pd.DataFrame() - return output - - -@register_output_quantity(name=["yearly_supply"]) -def metricy_supply( - market: xr.Dataset, sectors: list[AbstractSector], **kwargs -) -> pd.DataFrame: - """Current yearlysupply across all sectors.""" - market_out = market.copy(deep=True) - return _aggregate_sectors(sectors, market_out, op=sectory_supply) - - -def sectory_supply( - sector: AbstractSector, market: xr.Dataset, **kwargs -) -> pd.DataFrame: - """Sector supply with agent annotations.""" - from muse.production import supply - - def capacity(agents): - """Aggregates capacity across agents. - - The capacities are aggregated leaving only two - dimensions: asset (technology, installation date, - region), year. 
- """ - from muse.utilities import filter_input, reduce_assets - - traded = [ - u.assets.capacity for u in agents if "dst_region" in u.assets.capacity.dims - ] - nontraded = [ - u.assets.capacity - for u in agents - if "dst_region" not in u.assets.capacity.dims - ] - if not traded: - full_list = [ - list(nontraded[i].year.values) - for i in range(len(nontraded)) - if "year" in nontraded[i].dims - ] - flat_list = [item for sublist in full_list for item in sublist] - years = sorted(list(set(flat_list))) - nontraded = [ - filter_input(u.assets.capacity, year=years) - for u in agents - if "dst_region" not in u.assets.capacity.dims - ] - - return reduce_assets(nontraded) - - if not nontraded: - full_list = [ - list(traded[i].year.values) - for i in range(len(traded)) - if "year" in traded[i].dims - ] - flat_list = [item for sublist in full_list for item in sublist] - years = sorted(list(set(flat_list))) - traded = [ - filter_input(u.assets.capacity, year=years) - for u in agents - if "dst_region" in u.assets.capacity.dims - ] - return reduce_assets(traded) - traded_results = reduce_assets(traded) - nontraded_results = reduce_assets(nontraded) - return reduce_assets( - [ - traded_results, - nontraded_results - * (nontraded_results.region == traded_results.dst_region), - ] - ) - - data_sector: list[xr.DataArray] = [] - techs = getattr(sector, "technologies", []) - agents = sorted(getattr(sector, "agents", []), key=attrgetter("name")) - - if len(techs) > 0: - if "dst_region" in techs.dims: - output_year = agents[0].year - agents[0].forecast - years = market.year.values - capacity = ( - capacity(agents) - .interp(year=years, method="linear") - .sel(year=output_year) - ) - agent_market = market.sel(year=output_year).copy() - agent_market["consumption"] = agent_market.consumption - technologies = techs.sel(year=output_year) - result = supply( - agent_market, - capacity, - technologies, - ) - - if "year" in result.dims: - data_agent = result.sel(year=output_year) - else: - 
data_agent = result - data_agent["year"] = output_year - - data_agent["agent"] = agents[0].name - data_agent["category"] = agents[0].category - data_agent["sector"] = getattr(sector, "name", "unnamed") - - a = data_agent.to_dataframe("supply") - a["comm_usage"] = a["comm_usage"].apply(lambda x: x.name) - if len(a) > 0 and len(a.technology.values) > 0: - b = a.reset_index() - b = b[b["supply"] != 0] - data_sector.append(b) - else: - for agent in agents: - output_year = agent.year - agent.forecast - capacity = agent.filter_input( - agent.assets.capacity, year=output_year - ).fillna(0.0) - technologies = techs.sel(year=output_year, region=agent.region) - agent_market = market.sel(year=output_year).copy() - agent_market["consumption"] = agent_market.consumption * agent.quantity - included = [ - i - for i in agent_market["commodity"].values - if i in technologies.enduse.values - ] - excluded = [ - i for i in agent_market["commodity"].values if i not in included - ] - agent_market.loc[dict(commodity=excluded)] = 0 - - result = supply( - agent_market, - capacity, - technologies, - ) - - if "year" in result.dims: - data_agent = result.sel(year=output_year) - else: - data_agent = result - data_agent["year"] = output_year - if "dst_region" not in data_agent.coords: - data_agent["dst_region"] = agent.region - data_agent["agent"] = agent.name - data_agent["category"] = agent.category - data_agent["sector"] = getattr(sector, "name", "unnamed") - - a = data_agent.to_dataframe("supply") - a["comm_usage"] = a["comm_usage"].apply(lambda x: x.name) - if len(a) > 0 and len(a.technology.values) > 0: - b = a.reset_index() - b = b[b["supply"] != 0] - data_sector.append(b) - - if len(data_sector) > 0: - output = pd.concat(data_sector, sort=True).reset_index() - else: - output = pd.DataFrame() - return output - - -@register_output_quantity(name=["timeslice_consumption"]) -def metric_consumption( - market: xr.Dataset, sectors: list[AbstractSector], **kwargs -) -> pd.DataFrame: - """Current 
timeslice consumption across all sectors.""" - return _aggregate_sectors(sectors, market, op=sector_consumption) - - -def sector_consumption( - sector: AbstractSector, market: xr.Dataset, **kwargs -) -> pd.DataFrame: - """Sector fuel consumption with agent annotations.""" - from muse.production import supply - from muse.quantities import consumption - - data_sector: list[xr.DataArray] = [] - techs = getattr(sector, "technologies", []) - agents = sorted(getattr(sector, "agents", []), key=attrgetter("name")) - - agent_market = market - if len(techs) > 0: - for a in agents: - output_year = a.year - a.forecast - capacity = a.filter_input(a.assets.capacity, year=output_year).fillna(0.0) - technologies = a.filter_input(techs, year=output_year).fillna(0.0) - agent_market = market.sel(year=output_year).copy() - agent_market["consumption"] = drop_timeslice( - agent_market.consumption * a.quantity - ) - included = [ - i - for i in agent_market["commodity"].values - if i in technologies.enduse.values - ] - excluded = [ - i for i in agent_market["commodity"].values if i not in included - ] - agent_market.loc[dict(commodity=excluded)] = 0 - - production = convert_timeslice( - supply( - agent_market, - capacity, - technologies, - ), - agent_market["consumption"].timeslice, - QuantityType.EXTENSIVE, - ) - prices = a.filter_input(market.prices, year=output_year) - result = consumption( - technologies=technologies, production=production, prices=prices - ) - if "year" in result.dims: - data_agent = result.sel(year=output_year) - else: - data_agent = result - data_agent["year"] = output_year - if "dst_region" not in data_agent.coords: - data_agent["dst_region"] = a.region - data_agent["agent"] = a.name - data_agent["category"] = a.category - data_agent["sector"] = getattr(sector, "name", "unnamed") - - a = multiindex_to_coords(data_agent, "timeslice").to_dataframe( - "consumption" - ) - a["comm_usage"] = a["comm_usage"].apply(lambda x: x.name) - if not a.empty: - 
data_sector.append(a[a["consumption"] != 0]) - - if len(data_sector) > 0: - output = pd.concat(data_sector, sort=True).reset_index() - else: - output = pd.DataFrame() - return output - - -@register_output_quantity(name=["yearly_consumption"]) -def metricy_consumption( - market: xr.Dataset, sectors: list[AbstractSector], **kwargs -) -> pd.DataFrame: - """Current yearly consumption across all sectors.""" - return _aggregate_sectors(sectors, market, op=sectory_consumption) - - -def sectory_consumption( - sector: AbstractSector, market: xr.Dataset, **kwargs -) -> pd.DataFrame: - """Sector fuel consumption with agent annotations.""" - from muse.production import supply - from muse.quantities import consumption - - data_sector: list[xr.DataArray] = [] - techs = getattr(sector, "technologies", []) - agents = sorted(getattr(sector, "agents", []), key=attrgetter("name")) - - agent_market = market - if len(techs) > 0: - for a in agents: - output_year = a.year - a.forecast - capacity = a.filter_input(a.assets.capacity, year=output_year).fillna(0.0) - technologies = a.filter_input(techs, year=output_year).fillna(0.0) - agent_market = market.sel(year=output_year).copy() - agent_market["consumption"] = agent_market.consumption * a.quantity - included = [ - i - for i in agent_market["commodity"].values - if i in technologies.enduse.values - ] - excluded = [ - i for i in agent_market["commodity"].values if i not in included - ] - agent_market.loc[dict(commodity=excluded)] = 0 - - production = supply( - agent_market, - capacity, - technologies, - ) - - prices = a.filter_input(market.prices, year=output_year) - result = consumption( - technologies=technologies, production=production, prices=prices - ) - if "year" in result.dims: - data_agent = result.sel(year=output_year) - else: - data_agent = result - data_agent["year"] = output_year - if "dst_region" not in data_agent.coords: - data_agent["dst_region"] = a.region - data_agent["agent"] = a.name - data_agent["category"] = 
a.category - data_agent["sector"] = getattr(sector, "name", "unnamed") - a = data_agent.to_dataframe("consumption") - a["comm_usage"] = a["comm_usage"].apply(lambda x: x.name) - if len(a) > 0 and len(a.technology.values) > 0: - b = a.reset_index() - b = b[b["consumption"] != 0] - data_sector.append(b) - if len(data_sector) > 0: - output = pd.concat(data_sector, sort=True).reset_index() - else: - output = pd.DataFrame() - return output - - @register_output_quantity(name=["fuel_costs"]) def metric_fuel_costs( market: xr.Dataset, sectors: list[AbstractSector], **kwargs @@ -724,14 +267,10 @@ def sector_fuel_costs( year=output_year, ).fillna(0.0) - production = convert_timeslice( - supply( - agent_market, - capacity, - technologies, - ), - agent_market["consumption"].timeslice, - QuantityType.EXTENSIVE, + production = supply( + agent_market, + capacity, + technologies, ) prices = a.filter_input(market.prices, year=output_year) @@ -775,7 +314,6 @@ def sector_capital_costs( if len(technologies) > 0: for a in agents: - demand = market.consumption * a.quantity output_year = a.year - a.forecast capacity = a.filter_input(a.assets.capacity, year=output_year).fillna(0.0) data = a.filter_input( @@ -783,12 +321,7 @@ def sector_capital_costs( year=output_year, technology=capacity.technology, ) - result = data.cap_par * (capacity**data.cap_exp) - data_agent = convert_timeslice( - result, - demand.timeslice, - QuantityType.EXTENSIVE, - ) + data_agent = distribute_timeslice(data.cap_par * (capacity**data.cap_exp)) data_agent["agent"] = a.name data_agent["category"] = a.category data_agent["sector"] = getattr(sector, "name", "unnamed") @@ -845,15 +378,12 @@ def sector_emission_costs( i = (np.where(envs))[0][0] red_envs = envs[i].commodity.values prices = a.filter_input(market.prices, year=output_year, commodity=red_envs) - production = convert_timeslice( - supply( - agent_market, - capacity, - technologies, - ), - agent_market["consumption"].timeslice, - QuantityType.EXTENSIVE, + 
production = supply( + agent_market, + capacity, + technologies, ) + total = production.sel(commodity=enduses).sum("commodity") data_agent = total * (allemissions * prices).sum("commodity") data_agent["agent"] = a.name @@ -920,11 +450,7 @@ def sector_lcoe(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.Data capacity = agent.filter_input(capacity_to_service_demand(demand, techs)) production = ( capacity - * convert_timeslice( - techs.fixed_outputs, - demand.timeslice, - QuantityType.EXTENSIVE, - ) + * distribute_timeslice(techs.fixed_outputs) * techs.utilization_factor ) @@ -1001,11 +527,7 @@ def sector_eac(sector: AbstractSector, market: xr.Dataset, **kwargs) -> pd.DataF capacity = agent.filter_input(capacity_to_service_demand(demand, techs)) production = ( capacity - * convert_timeslice( - techs.fixed_outputs, - demand.timeslice, - QuantityType.EXTENSIVE, - ) + * distribute_timeslice(techs.fixed_outputs) * techs.utilization_factor ) diff --git a/src/muse/production.py b/src/muse/production.py index 5a21775bc..2887ad3e2 100644 --- a/src/muse/production.py +++ b/src/muse/production.py @@ -32,14 +32,13 @@ def production( __all__ = [ "PRODUCTION_SIGNATURE", - "demand_matched_production", "factory", "maximum_production", "register_production", "supply", ] from collections.abc import Mapping, MutableMapping -from typing import Any, Callable, Union, cast +from typing import Any, Callable, Optional, Union, cast import xarray as xr @@ -99,7 +98,10 @@ def factory( @register_production(name=("max", "maximum")) def maximum_production( - market: xr.Dataset, capacity: xr.DataArray, technologies: xr.Dataset + market: xr.Dataset, + capacity: xr.DataArray, + technologies: xr.Dataset, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Production when running at full capacity. 
@@ -108,12 +110,15 @@ def maximum_production( """ from muse.quantities import maximum_production - return maximum_production(technologies, capacity, timeslices=market.timeslice) + return maximum_production(technologies, capacity, timeslice_level) @register_production(name=("share", "shares")) def supply( - market: xr.Dataset, capacity: xr.DataArray, technologies: xr.Dataset + market: xr.Dataset, + capacity: xr.DataArray, + technologies: xr.Dataset, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Service current demand equally from all assets. @@ -122,93 +127,6 @@ def supply( """ from muse.quantities import supply - return supply(capacity, market.consumption, technologies) - - -@register_production(name="match") -def demand_matched_production( - market: xr.Dataset, - capacity: xr.DataArray, - technologies: xr.Dataset, - costs: str = "prices", -) -> xr.DataArray: - """Production from matching demand via annual lcoe.""" - from muse.costs import annual_levelized_cost_of_energy as lcoe - from muse.quantities import demand_matched_production, gross_margin - from muse.utilities import broadcast_techs - - if costs == "prices": - prices = market.prices - elif costs == "gross_margin": - prices = gross_margin(technologies, capacity, market.prices) - elif costs == "lcoe": - prices = lcoe( - market.prices, cast(xr.Dataset, broadcast_techs(technologies, capacity)) - ) - else: - raise ValueError(f"Unknown costs option {costs}") - - return demand_matched_production( - demand=market.consumption, - prices=prices, - capacity=capacity, - technologies=technologies, - ) - - -@register_production(name="costed") -def costed_production( - market: xr.Dataset, - capacity: xr.DataArray, - technologies: xr.Dataset, - costs: Union[xr.DataArray, Callable, str] = "alcoe", - with_minimum_service: bool = True, - with_emission: bool = True, -) -> xr.DataArray: - """Computes production from ranked assets. - - The assets are ranked according to their cost. 
The cost can be provided as an - xarray, a callable creating an xarray, or as "alcoe". The asset with least cost are - allowed to service the demand first, up to the maximum production. By default, the - minimum service is applied first. - """ - from muse.commodities import CommodityUsage, check_usage, is_pollutant - from muse.costs import annual_levelized_cost_of_energy - from muse.quantities import ( - costed_production, - emission, - ) - from muse.utilities import broadcast_techs - - if isinstance(costs, str) and costs.lower() == "alcoe": - costs = annual_levelized_cost_of_energy - elif isinstance(costs, str): - raise ValueError(f"Unknown cost {costs}") - if callable(costs): - technodata = cast(xr.Dataset, broadcast_techs(technologies, capacity)) - costs = costs( - prices=market.prices.sel(region=technodata.region), technologies=technodata - ) - else: - costs = costs - assert isinstance(costs, xr.DataArray) - - production = costed_production( - market.consumption, - costs, - capacity, - technologies, - with_minimum_service=with_minimum_service, + return supply( + capacity, market.consumption, technologies, timeslice_level=timeslice_level ) - # add production of environmental pollutants - if with_emission: - env = is_pollutant(technologies.comm_usage) - production[dict(commodity=env)] = emission( - production, technologies.fixed_outputs - ).transpose(*production.dims) - production[ - dict( - commodity=~check_usage(technologies.comm_usage, CommodityUsage.PRODUCT) - ) - ] = 0 - return production diff --git a/src/muse/quantities.py b/src/muse/quantities.py index 781be683c..27ccfba3c 100644 --- a/src/muse/quantities.py +++ b/src/muse/quantities.py @@ -8,18 +8,19 @@ """ from collections.abc import Sequence -from typing import Callable, Optional, Union, cast +from typing import Optional, Union, cast import numpy as np import xarray as xr +from muse.timeslices import broadcast_timeslice, distribute_timeslice + def supply( capacity: xr.DataArray, demand: xr.DataArray, 
technologies: Union[xr.Dataset, xr.DataArray], - interpolation: str = "linear", - production_method: Optional[Callable] = None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Production and emission for a given capacity servicing a given demand. @@ -36,8 +37,7 @@ def supply( exceed its share of the demand. technologies: factors bindings the capacity of an asset with its production of commodities and environmental pollutants. - interpolation: Interpolation type - production_method: Production for a given capacity + timeslice_level: the desired timeslice level of the result (e.g. "hour", "day") Return: A data array where the commodity dimension only contains actual outputs (i.e. no @@ -45,11 +45,12 @@ def supply( """ from muse.commodities import CommodityUsage, check_usage, is_pollutant - if production_method is None: - production_method = maximum_production - - maxprod = production_method(technologies, capacity, timeslices=demand) - minprod = minimum_production(technologies, capacity, timeslices=demand) + maxprod = maximum_production( + technologies, capacity, timeslice_level=timeslice_level + ) + minprod = minimum_production( + technologies, capacity, timeslice_level=timeslice_level + ) size = np.array(maxprod.region).size # in presence of trade demand needs to map maxprod dst_region if ( @@ -88,8 +89,16 @@ def supply( demsum = set(maxprod.dims).difference(demand.dims) expanded_demand = (demand * maxprod / maxprod.sum(demsum)).fillna(0) - expanded_maxprod = (maxprod * demand / demand.sum(prodsum)).fillna(0) - expanded_minprod = (minprod * demand / demand.sum(prodsum)).fillna(0) + expanded_maxprod = ( + maxprod + * demand + / broadcast_timeslice(demand.sum(prodsum), level=timeslice_level) + ).fillna(0) + expanded_minprod = ( + minprod + * demand + / broadcast_timeslice(demand.sum(prodsum), level=timeslice_level) + ).fillna(0) expanded_demand = expanded_demand.reindex_like(maxprod) expanded_minprod = expanded_minprod.reindex_like(maxprod) @@ -100,9 
+109,9 @@ def supply( # add production of environmental pollutants env = is_pollutant(technologies.comm_usage) - result[{"commodity": env}] = emission(result, technologies.fixed_outputs).transpose( - *result.dims - ) + result[{"commodity": env}] = emission( + result, technologies.fixed_outputs, timeslice_level=timeslice_level + ).transpose(*result.dims) result[ {"commodity": ~check_usage(technologies.comm_usage, CommodityUsage.PRODUCT)} ] = 0 @@ -110,7 +119,11 @@ def supply( return result -def emission(production: xr.DataArray, fixed_outputs: xr.DataArray): +def emission( + production: xr.DataArray, + fixed_outputs: xr.DataArray, + timeslice_level: Optional[str] = None, +): """Computes emission from current products. Emissions are computed as `sum(product) * fixed_outputs`. @@ -120,6 +133,7 @@ def emission(production: xr.DataArray, fixed_outputs: xr.DataArray): when computing emissions. fixed_outputs: factor relating total production to emissions. For convenience, this can also be a `technologies` dataset containing `fixed_output`. + timeslice_level: the desired timeslice level of the result (e.g. "hour", "day") Return: A data array containing emissions (and only emissions). @@ -133,13 +147,16 @@ def emission(production: xr.DataArray, fixed_outputs: xr.DataArray): ) envs = is_pollutant(fouts.comm_usage) enduses = is_enduse(fouts.comm_usage) - return production.sel(commodity=enduses).sum("commodity") * fouts.sel( - commodity=envs + return production.sel(commodity=enduses).sum("commodity") * broadcast_timeslice( + fouts.sel(commodity=envs), level=timeslice_level ) def gross_margin( - technologies: xr.Dataset, capacity: xr.DataArray, prices: xr.Dataset + technologies: xr.Dataset, + capacity: xr.DataArray, + prices: xr.Dataset, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """The percentage of revenue after direct expenses have been subtracted. @@ -153,7 +170,6 @@ def gross_margin( - non-environmental commodities OUTPUTS are related to revenues. 
""" from muse.commodities import is_enduse, is_pollutant - from muse.timeslices import QuantityType, convert_timeslice from muse.utilities import broadcast_techs tech = broadcast_techs( # type: ignore @@ -190,21 +206,19 @@ def gross_margin( enduses = is_enduse(technologies.comm_usage) # Variable costs depend on factors such as labour - variable_costs = convert_timeslice( + variable_costs = distribute_timeslice( var_par * ((fixed_outputs.sel(commodity=enduses)).sum("commodity")) ** var_exp, - prices.timeslice, - QuantityType.EXTENSIVE, + level=timeslice_level, ) # The individual prices are selected # costs due to consumables, direct inputs consumption_costs = ( - prices - * convert_timeslice(fixed_inputs, prices.timeslice, QuantityType.EXTENSIVE) + prices * distribute_timeslice(fixed_inputs, level=timeslice_level) ).sum("commodity") # costs due to pollutants - production_costs = prices * convert_timeslice( - fixed_outputs, prices.timeslice, QuantityType.EXTENSIVE + production_costs = prices * distribute_timeslice( + fixed_outputs, level=timeslice_level ) environmental_costs = (production_costs.sel(commodity=environmentals)).sum( "commodity" @@ -223,8 +237,8 @@ def gross_margin( def decommissioning_demand( technologies: xr.Dataset, capacity: xr.DataArray, - timeslices: xr.DataArray, year: Optional[Sequence[int]] = None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: r"""Computes demand from process decommissioning. 
@@ -259,10 +273,14 @@ def decommissioning_demand( baseyear = min(year) dyears = [u for u in year if u != baseyear] + # Calculate the decrease in capacity from the current year to future years + capacity_decrease = capacity.sel(year=baseyear) - capacity.sel(year=dyears) + + # Calculate production associated with this capacity return maximum_production( technologies, - capacity.sel(year=baseyear) - capacity.sel(year=dyears), - timeslices=timeslices, + capacity_decrease, + timeslice_level=timeslice_level, ).clip(min=0) @@ -270,6 +288,7 @@ def consumption( technologies: xr.Dataset, production: xr.DataArray, prices: Optional[xr.DataArray] = None, + timeslice_level: Optional[str] = None, **kwargs, ) -> xr.DataArray: """Commodity consumption when fulfilling the whole production. @@ -278,7 +297,6 @@ def consumption( are not given, then flexible consumption is *not* considered. """ from muse.commodities import is_enduse, is_fuel - from muse.timeslices import QuantityType, convert_timeslice from muse.utilities import filter_with_template params = filter_with_template( @@ -290,14 +308,10 @@ def consumption( comm_usage = technologies.comm_usage.sel(commodity=production.commodity) production = production.sel(commodity=is_enduse(comm_usage)).sum("commodity") - - if prices is not None and "timeslice" in prices.dims: - production = convert_timeslice( # type: ignore - production, prices, QuantityType.EXTENSIVE - ) - params_fuels = is_fuel(params.comm_usage) - consumption = production * params.fixed_inputs.where(params_fuels, 0) + consumption = production * broadcast_timeslice( + params.fixed_inputs.where(params_fuels, 0), level=timeslice_level + ) if prices is None: return consumption @@ -318,7 +332,9 @@ def consumption( ] # add consumption from cheapest fuel assert all(flexs.commodity.values == consumption.commodity.values) - flex = flexs.where(minprices == flexs.commodity, 0) + flex = flexs.where( + minprices == broadcast_timeslice(flexs.commodity, level=timeslice_level), 0 + ) 
flex = flex / (flex > 0).sum("commodity").clip(min=1) return consumption + flex * production @@ -326,7 +342,7 @@ def consumption( def maximum_production( technologies: xr.Dataset, capacity: xr.DataArray, - timeslices: xr.DataArray, + timeslice_level: Optional[str] = None, **filters, ): r"""Production for a given capacity. @@ -356,13 +372,13 @@ def maximum_production( technologies. Filters not relevant to the quantities of interest, i.e. filters that are not a dimension of `capacity` or `technologies`, are silently ignored. + timeslice_level: the desired timeslice level of the result (e.g. "hour", "day") Return: `capacity * fixed_outputs * utilization_factor`, whittled down according to the filters and the set of technologies in `capacity`. """ from muse.commodities import is_enduse - from muse.timeslices import QuantityType, convert_timeslice from muse.utilities import broadcast_techs, filter_input capa = filter_input( @@ -375,47 +391,18 @@ def maximum_production( btechs, **{k: v for k, v in filters.items() if k in btechs.dims} ) result = ( - capa - * convert_timeslice(ftechs.fixed_outputs, timeslices, QuantityType.EXTENSIVE) - * ftechs.utilization_factor + broadcast_timeslice(capa, level=timeslice_level) + * distribute_timeslice(ftechs.fixed_outputs, level=timeslice_level) + * broadcast_timeslice(ftechs.utilization_factor, level=timeslice_level) ) return result.where(is_enduse(result.comm_usage), 0) -def demand_matched_production( - demand: xr.DataArray, - prices: xr.DataArray, - capacity: xr.DataArray, - technologies: xr.Dataset, - **filters, -) -> xr.DataArray: - """Production matching the input demand. - - Arguments: - demand: demand to match. - prices: price from which to compute the annual levelized cost of energy. - capacity: capacity from which to obtain the maximum production constraints. - technologies: technologies we are looking at - **filters: keyword arguments with which to filter the input datasets and - data arrays., e.g. region, or year. 
- """ - from muse.costs import annual_levelized_cost_of_energy as ALCOE - from muse.demand_matching import demand_matching - from muse.utilities import broadcast_techs - - technodata = cast(xr.Dataset, broadcast_techs(technologies, capacity)) - cost = ALCOE(prices=prices, technologies=technodata, **filters) - max_production = maximum_production( - technodata, capacity, timeslices=demand, **filters - ) - assert ("timeslice" in demand.dims) == ("timeslice" in cost.dims) - return demand_matching(demand, cost, max_production) - - def capacity_in_use( production: xr.DataArray, technologies: xr.Dataset, max_dim: Optional[Union[str, tuple[str]]] = "commodity", + timeslice_level: Optional[str] = None, **filters, ): """Capacity-in-use for each asset, given production. @@ -433,6 +420,7 @@ def capacity_in_use( technologies. Filters not relevant to the quantities of interest, i.e. filters that are not a dimension of `capacity` or `technologies`, are silently ignored. + timeslice_level: the desired timeslice level of the result (e.g. "hour", "day") Return: Capacity-in-use for each technology, whittled down by the filters. @@ -452,7 +440,9 @@ def capacity_in_use( ) factor = 1 / (ftechs.fixed_outputs * ftechs.utilization_factor) - capa_in_use = (prod * factor).where(~np.isinf(factor), 0) + capa_in_use = (prod * broadcast_timeslice(factor, level=timeslice_level)).where( + ~np.isinf(factor), 0 + ) capa_in_use = capa_in_use.where( is_enduse(technologies.comm_usage.sel(commodity=capa_in_use.commodity)), 0 @@ -463,86 +453,10 @@ def capacity_in_use( return capa_in_use -def costed_production( - demand: xr.Dataset, - costs: xr.DataArray, - capacity: xr.DataArray, - technologies: xr.Dataset, - with_minimum_service: bool = True, -) -> xr.DataArray: - """Computes production from ranked assets. - - The assets are ranked according to their cost. The asset with least cost are allowed - to service the demand first, up to the maximum production. 
By default, the minimum - service is applied first. - """ - from muse.quantities import maximum_production - from muse.utilities import broadcast_techs - - technodata = cast(xr.Dataset, broadcast_techs(technologies, capacity)) - - if len(capacity.region.dims) == 0: - - def group_assets(x: xr.DataArray) -> xr.DataArray: - return x.sum("asset") - - else: - - def group_assets(x: xr.DataArray) -> xr.DataArray: - return xr.Dataset(dict(x=x)).groupby("region").sum("asset").x - - ranking = costs.rank("asset") - maxprod = maximum_production(technodata, capacity, timeslices=demand.timeslice) - commodity = (maxprod > 0).any([i for i in maxprod.dims if i != "commodity"]) - commodity = commodity.drop_vars( - [u for u in commodity.coords if u not in commodity.dims] - ) - demand = demand.sel(commodity=commodity).copy() - - constraints = ( - xr.Dataset(dict(maxprod=maxprod, ranking=ranking, has_output=maxprod > 0)) - .set_coords("ranking") - .set_coords("has_output") - .sel(commodity=commodity) - ) - - if not with_minimum_service: - production = xr.zeros_like(constraints.maxprod) - else: - production = ( - getattr(technodata, "minimum_service_factor", 0) * constraints.maxprod - ) - demand = np.maximum(demand - group_assets(production), 0) - - for rank in sorted(set(constraints.ranking.values.flatten())): - condition = (constraints.ranking == rank) & constraints.has_output - current_maxprod = constraints.maxprod.where(condition, 0) - fullprod = group_assets(current_maxprod) - if (fullprod <= demand + 1e-10).all(): - current_demand = fullprod - current_prod = current_maxprod - else: - if "region" in demand.dims: - demand_prod = demand.sel(region=production.region) - else: - demand_prod = demand - demand_prod = ( - current_maxprod / current_maxprod.sum("asset") * demand_prod - ).where(condition, 0) - current_prod = np.minimum(demand_prod, current_maxprod) - current_demand = group_assets(current_prod) - demand -= np.minimum(current_demand, demand) - production = production + 
current_prod - - result = xr.zeros_like(maxprod) - result[dict(commodity=commodity)] = result[dict(commodity=commodity)] + production - return result - - def minimum_production( technologies: xr.Dataset, capacity: xr.DataArray, - timeslices: xr.DataArray, + timeslice_level: Optional[str] = None, **filters, ): r"""Minimum production for a given capacity. @@ -572,13 +486,13 @@ def minimum_production( technologies. Filters not relevant to the quantities of interest, i.e. filters that are not a dimension of `capacity` or `technologies`, are silently ignored. + timeslice_level: the desired timeslice level of the result (e.g. "hour", "day") Return: `capacity * fixed_outputs * minimum_service_factor`, whittled down according to the filters and the set of technologies in `capacity`. """ from muse.commodities import is_enduse - from muse.timeslices import QuantityType, convert_timeslice from muse.utilities import broadcast_techs, filter_input capa = filter_input( @@ -586,7 +500,7 @@ def minimum_production( ) if "minimum_service_factor" not in technologies: - return xr.zeros_like(capa) + return broadcast_timeslice(xr.zeros_like(capa), level=timeslice_level) btechs = broadcast_techs( # type: ignore cast( @@ -599,9 +513,9 @@ def minimum_production( btechs, **{k: v for k, v in filters.items() if k in btechs.dims} ) result = ( - capa - * convert_timeslice(ftechs.fixed_outputs, timeslices, QuantityType.EXTENSIVE) - * ftechs.minimum_service_factor + broadcast_timeslice(capa, level=timeslice_level) + * distribute_timeslice(ftechs.fixed_outputs, level=timeslice_level) + * broadcast_timeslice(ftechs.minimum_service_factor, level=timeslice_level) ) return result.where(is_enduse(result.comm_usage), 0) @@ -609,17 +523,12 @@ def minimum_production( def capacity_to_service_demand( demand: xr.DataArray, technologies: xr.Dataset, - hours=None, + timeslice_level: Optional[str] = None, ) -> xr.DataArray: """Minimum capacity required to fulfill the demand.""" - from muse.timeslices import 
represent_hours - - if hours is None: - hours = represent_hours(demand.timeslice) - max_hours = hours.max() / hours.sum() - commodity_output = technologies.fixed_outputs.sel(commodity=demand.commodity) - max_demand = ( - demand.where(commodity_output > 0, 0) - / commodity_output.where(commodity_output > 0, 1) - ).max(("commodity", "timeslice")) - return max_demand / technologies.utilization_factor / max_hours + timeslice_outputs = distribute_timeslice( + technologies.fixed_outputs.sel(commodity=demand.commodity), + level=timeslice_level, + ) * broadcast_timeslice(technologies.utilization_factor, level=timeslice_level) + capa_to_service_demand = demand / timeslice_outputs + return capa_to_service_demand.max(("commodity", "timeslice")) diff --git a/src/muse/readers/__init__.py b/src/muse/readers/__init__.py index 631cecdaf..01e06dca3 100644 --- a/src/muse/readers/__init__.py +++ b/src/muse/readers/__init__.py @@ -2,7 +2,7 @@ from muse.defaults import DATA_DIRECTORY from muse.readers.csv import * # noqa: F403 -from muse.readers.toml import read_settings, read_timeslices # noqa: F401 +from muse.readers.toml import read_settings # noqa: F401 DEFAULT_SETTINGS_PATH = DATA_DIRECTORY / "default_settings.toml" """Default settings path.""" @@ -19,14 +19,3 @@ def camel_to_snake(name: str) -> str: result = result.replace("n2_o", "N2O") result = result.replace("f-gases", "F-gases") return result - - -def kebab_to_camel(string): - return "".join(x.capitalize() for x in string.split("-")) - - -def snake_to_kebab(string: str) -> str: - from re import sub - - result = sub(r"((?<=[a-z])[A-Z]|(? 
xr.Dataset: def read_technodata_timeslices(filename: Union[str, Path]) -> xr.Dataset: from muse.readers import camel_to_snake - from muse.timeslices import TIMESLICE, convert_timeslice + from muse.timeslices import sort_timeslices csv = pd.read_csv(filename, float_precision="high", low_memory=False) csv = csv.rename(columns=camel_to_snake) @@ -164,9 +163,7 @@ def read_technodata_timeslices(filename: Union[str, Path]) -> xr.Dataset: if item not in ["technology", "region", "year"] ] result = result.stack(timeslice=timeslice_levels) - result = convert_timeslice(result, TIMESLICE) - # sorts timeslices into the correct order - return result + return sort_timeslices(result) def read_io_technodata(filename: Union[str, Path]) -> xr.Dataset: @@ -256,7 +253,6 @@ def read_initial_capacity(data: Union[str, Path, pd.DataFrame]) -> xr.DataArray: .set_index(["region", "technology", "year"]) ) result = xr.DataArray.from_series(data["value"]) - # inconsistent legacy data files. result = result.sel(year=result.year != "2100.1") result["year"] = result.year.astype(int) return result @@ -410,35 +406,6 @@ def read_technologies( return result -def read_csv_timeslices(path: Union[str, Path], **kwargs) -> xr.DataArray: - """Reads timeslice information from input.""" - from logging import getLogger - - getLogger(__name__).info(f"Reading timeslices from {path}") - data = pd.read_csv(path, float_precision="high", **kwargs) - - def snake_case(string): - from re import sub - - result = sub(r"((?<=[a-z])[A-Z]|(? xr.Dataset: """Reads commodities information from input.""" from logging import getLogger @@ -473,7 +440,6 @@ def read_global_commodities(path: Union[str, Path]) -> xr.Dataset: def read_timeslice_shares( path: Union[str, Path] = DEFAULT_SECTORS_DIRECTORY, sector: Optional[str] = None, - timeslice: Union[str, Path, xr.DataArray] = "Timeslices{sector}.csv", ) -> xr.Dataset: """Reads sliceshare information into a xr.Dataset. 
@@ -492,12 +458,6 @@ def read_timeslice_shares( path, filename = path.parent, path.name re = match(r"TimesliceShare(.*)\.csv", filename) sector = path.name if re is None else re.group(1) - if isinstance(timeslice, str) and "{sector}" in timeslice: - timeslice = timeslice.format(sector=sector) - if isinstance(timeslice, (str, Path)) and not Path(timeslice).is_file(): - timeslice = find_sectors_file(timeslice, sector, path) - if isinstance(timeslice, (str, Path)): - timeslice = read_csv_timeslices(timeslice, low_memory=False) share_path = find_sectors_file(f"TimesliceShare{sector}.csv", sector, path) getLogger(__name__).info(f"Reading timeslice shares from {share_path}") @@ -510,13 +470,6 @@ def read_timeslice_shares( data.columns.name = "commodity" result = xr.DataArray(data).unstack("rt").to_dataset(name="shares") - - if timeslice is None: - result = result.drop_vars("timeslice") - elif isinstance(timeslice, xr.DataArray) and hasattr(timeslice, "timeslice"): - result["timeslice"] = timeslice.timeslice - else: - result["timeslice"] = timeslice return result.shares @@ -627,19 +580,16 @@ def read_initial_market( projections: Union[xr.DataArray, Path, str], base_year_import: Optional[Union[str, Path, xr.DataArray]] = None, base_year_export: Optional[Union[str, Path, xr.DataArray]] = None, - timeslices: Optional[xr.DataArray] = None, ) -> xr.Dataset: """Read projections, import and export csv files.""" from logging import getLogger - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, distribute_timeslice # Projections must always be present if isinstance(projections, (str, Path)): getLogger(__name__).info(f"Reading projections from {projections}") projections = read_attribute_table(projections) - if timeslices is not None: - projections = convert_timeslice(projections, timeslices, QuantityType.INTENSIVE) # Base year export is optional. 
If it is not there, it's set to zero if isinstance(base_year_export, (str, Path)): @@ -657,13 +607,8 @@ def read_initial_market( getLogger(__name__).info("Base year import not provided. Set to zero.") base_year_import = xr.zeros_like(projections) - if timeslices is not None: - base_year_export = convert_timeslice( - base_year_export, timeslices, QuantityType.EXTENSIVE - ) - base_year_import = convert_timeslice( - base_year_import, timeslices, QuantityType.EXTENSIVE - ) + base_year_export = distribute_timeslice(base_year_export, level=None) + base_year_import = distribute_timeslice(base_year_import, level=None) base_year_export.name = "exports" base_year_import.name = "imports" @@ -683,7 +628,7 @@ def read_initial_market( commodity_price="prices", units_commodity_price="units_prices" ) result["prices"] = ( - result["prices"].expand_dims({"timeslice": timeslices}).drop_vars("timeslice") + result["prices"].expand_dims({"timeslice": TIMESLICE}).drop_vars("timeslice") ) return result @@ -920,33 +865,6 @@ def read_trade( return result.rename(src_region="region") -def read_finite_resources(path: Union[str, Path]) -> xr.DataArray: - """Reads finite resources from csv file. - - The CSV file is made up of columns "Region", "Year", as well - as three timeslice columns ("Month", "Day", "Hour"). All three sets of columns are - optional. The timeslice set should contain a full set of timeslices, if present. - Other columns correspond to commodities. 
- """ - from muse.timeslices import TIMESLICE - - data = pd.read_csv(path) - data.columns = [c.lower() for c in data.columns] - ts_levels = TIMESLICE.get_index("timeslice").names - - if set(data.columns).issuperset(ts_levels): - timeslice = pd.MultiIndex.from_arrays( - [data[u] for u in ts_levels], names=ts_levels - ) - timeslice = pd.DataFrame(timeslice, columns=["timeslice"]) - data = pd.concat((data, timeslice), axis=1) - data.drop(columns=ts_levels, inplace=True) - indices = list({"year", "region", "timeslice"}.intersection(data.columns)) - data.set_index(indices, inplace=True) - - return xr.Dataset.from_dataframe(data).to_array(dim="commodity") - - def check_utilization_and_minimum_service_factors(data, filename): if "utilization_factor" not in data.columns: raise ValueError( diff --git a/src/muse/readers/toml.py b/src/muse/readers/toml.py index 5b357d636..55e97692c 100644 --- a/src/muse/readers/toml.py +++ b/src/muse/readers/toml.py @@ -16,7 +16,6 @@ ) import numpy as np -import pandas as pd import xarray as xr from muse.decorators import SETTINGS_CHECKS, register_settings_check @@ -360,6 +359,8 @@ def read_settings( Returns: A dictionary with the settings """ + from muse.timeslices import setup_module + getLogger(__name__).info("Reading MUSE settings") # The user data @@ -389,181 +390,16 @@ def read_settings( settings = add_known_parameters(default_settings, user_settings) settings = add_unknown_parameters(settings, user_settings) + # Set up timeslices + setup_module(settings) + settings.pop("timeslices", None) + # Finally, we run some checks to make sure all makes sense and files exist. validate_settings(settings) return convert(settings) -def read_ts_multiindex( - settings: Optional[Union[Mapping, str]] = None, - timeslice: Optional[xr.DataArray] = None, - transforms: Optional[dict[tuple, np.ndarray]] = None, -) -> pd.MultiIndex: - '''Read multiindex for a timeslice from TOML. - - Example: - The timeslices are read from ``timeslice_levels``. 
The levels (keyword) and - slice (list of values) correspond to the level, slices and slice aggregates - defined in the the ``timeslices`` section. - - >>> toml = """ - ... ["timeslices"] - ... winter.weekday.day = 5 - ... winter.weekday.night = 5 - ... winter.weekend.day = 2 - ... winter.weekend.night = 2 - ... winter.weekend.dusk = 1 - ... summer.weekday.day = 5 - ... summer.weekday.night = 5 - ... summer.weekend.day = 2 - ... summer.weekend.night = 2 - ... summer.weekend.dusk = 1 - ... level_names = ["semester", "week", "day"] - ... aggregates.allday = ["day", "night"] - ... [timeslice_levels] - ... day = ["dusk", "allday"] - ... """ - >>> from muse.timeslices import ( - ... reference_timeslice, aggregate_transforms - ... ) - >>> from muse.readers.toml import read_ts_multiindex - >>> ref = reference_timeslice(toml) - >>> transforms = aggregate_transforms(toml, ref) - >>> read_ts_multiindex(toml, ref, transforms) - MultiIndex([('summer', 'weekday', 'allday'), - ('summer', 'weekend', 'dusk'), - ('summer', 'weekend', 'allday'), - ('winter', 'weekday', 'allday'), - ('winter', 'weekend', 'dusk'), - ('winter', 'weekend', 'allday')], - names=['semester', 'week', 'day']) - - It is an error to refer to a level or a slice that does not exist: - - >>> read_ts_multiindex(dict(days=["dusk", "allday"]), ref, transforms) - Traceback (most recent call last): - ... - muse.readers.toml.IncorrectSettings: Unexpected level name(s): ... - >>> read_ts_multiindex(dict(day=["usk", "allday"]), ref, transforms) - Traceback (most recent call last): - ... - muse.readers.toml.IncorrectSettings: Unexpected slice(s): ... 
- ''' - from itertools import product - - from toml import loads - - from muse.timeslices import TIMESLICE, TRANSFORMS - - indices = (TIMESLICE if timeslice is None else timeslice).get_index("timeslice") - if transforms is None: - transforms = TRANSFORMS - if isinstance(settings, str): - settings = loads(settings) - elif settings is None: - return indices - elif not isinstance(settings, Mapping): - settings = undo_damage(settings) - settings = settings.get("timeslice_levels", settings) - assert isinstance(settings, Mapping) - if not set(settings).issubset(indices.names): - msg = "Unexpected level name(s): " + ", ".join( - set(settings).difference(indices.names) - ) - raise IncorrectSettings(msg) - levels = [ - settings.get(name, level) for name, level in zip(indices.names, indices.levels) - ] - levels = [[level] if isinstance(level, str) else level for level in levels] - for i, level in enumerate(levels): - known = [index[i] for index in transforms if len(index) > i] - unexpected = set(level).difference(known) - if unexpected: - raise IncorrectSettings("Unexpected slice(s): " + ", ".join(unexpected)) - return pd.MultiIndex.from_tuples( - [index for index in product(*levels) if index in transforms], - names=indices.names, - ) - - -def read_timeslices( - settings: Optional[Union[str, Mapping]] = None, - timeslice: Optional[xr.DataArray] = None, - transforms: Optional[dict[tuple, np.ndarray]] = None, -) -> xr.Dataset: - '''Reads timeslice levels and create resulting timeslice coordinate. - - Args: - settings: TOML dictionary. It should contain a ``timeslice_levels`` section. - Otherwise, the timeslices will default to the global (finest) timeslices. - timeslice: Finest timeslices. Defaults to the global in - :py:mod:`~muse.timeslices`. If using the default, then this function - should be called *after* the timeslice module has been setup with a call to - :py:func:`~muse.timeslice.setup_module`. - transforms: Transforms from desired timeslices to the finest timeslice. 
Defaults - to the global in :py:mod:`~muse.timeslices`. If using the default, - then this function should be called *after* the timeslice module has been - setup with a call to :py:func:`~muse.timeslice.setup_module`. - - Returns: - A xr.Dataset with the timeslice coordinates. - - Example: - >>> toml = """ - ... ["timeslices"] - ... winter.weekday.day = 5 - ... winter.weekday.night = 5 - ... winter.weekend.day = 2 - ... winter.weekend.night = 2 - ... winter.weekend.dusk = 1 - ... summer.weekday.day = 5 - ... summer.weekday.night = 5 - ... summer.weekend.day = 2 - ... summer.weekend.night = 2 - ... summer.weekend.dusk = 1 - ... level_names = ["semester", "week", "day"] - ... aggregates.allday = ["day", "night"] - ... [timeslice_levels] - ... day = ["dusk", "allday"] - ... """ - >>> from muse.timeslices import ( - ... reference_timeslice, aggregate_transforms - ... ) - >>> from muse.readers.toml import read_timeslices - >>> ref = reference_timeslice(toml) - >>> transforms = aggregate_transforms(toml, ref) - >>> ts = read_timeslices(toml, ref, transforms) - >>> assert "semester" in ts.coords - >>> assert "week" in ts.coords - >>> assert "day" in ts.coords - >>> assert "represent_hours" in ts.coords - >>> assert set(ts.coords["day"].data) == {"dusk", "allday"} - >>> assert set(ts.coords["week"].data) == {"weekday", "weekend"} - >>> assert set(ts.coords["semester"].data) == {"summer", "winter"} - ''' - from muse.timeslices import TIMESLICE, timeslice_projector - - if timeslice is None: - timeslice = TIMESLICE - if settings is None: - return xr.Dataset({"represent_hours": timeslice}).set_coords("represent_hours") - indices = read_ts_multiindex(settings, timeslice=timeslice, transforms=transforms) - units = xr.DataArray( - np.ones(len(indices)), coords={"timeslice": indices}, dims="timeslice" - ) - proj = timeslice_projector(units, finest=timeslice, transforms=transforms) - proj *= xr.DataArray( - timeslice.values, - coords={"finest_timeslice": proj.finest_timeslice}, - 
dims="finest_timeslice", - ) - - return xr.Dataset({"represent_hours": proj.sum("finest_timeslice")}).set_coords( - "represent_hours" - ) - - def add_known_parameters(dd, u, parent=None): """Function for updating the settings dictionary recursively. @@ -761,20 +597,6 @@ def check_iteration_control(settings: dict) -> None: assert settings["tolerance"] > 0, msg -@register_settings_check(vary_name=False) -def check_time_slices(settings: dict) -> None: - """Check the time slices. - - If there is no error, they are transformed into a xr.DataArray - """ - from muse.timeslices import setup_module - - setup_module(settings) - settings["timeslices"] = read_timeslices( - settings.get("mca", settings).get("timeslice_levels", None) - ).timeslice - - @register_settings_check(vary_name=False) def check_global_data_files(settings: dict) -> None: """Checks that the global user files exist.""" diff --git a/src/muse/sectors/__init__.py b/src/muse/sectors/__init__.py index 3aaf68d7d..64efbd7a0 100644 --- a/src/muse/sectors/__init__.py +++ b/src/muse/sectors/__init__.py @@ -7,8 +7,6 @@ investing in new assets. - :class:`~muse.sectors.preset_sector.PresetSector`: A sector that is meant to generate demand for the sectors above using a fixed formula or schedule. -- :class:`~muse.sectors.legacy_sector.LegacySector`: A wrapper around the original MUSE - sectors. All the sectors derive from :class:`AbstractSector`. The :class:`AbstractSector` defines two `abstract`__ functions which should be declared by derived sectors. 
`Abstract`__ @@ -32,13 +30,11 @@ __all__ = [ "SECTORS_REGISTERED", "AbstractSector", - "LegacySector", "PresetSector", "Sector", "register_sector", ] from muse.sectors.abstract import AbstractSector -from muse.sectors.legacy_sector import LegacySector from muse.sectors.preset_sector import PresetSector from muse.sectors.register import SECTORS_REGISTERED, register_sector from muse.sectors.sector import Sector diff --git a/src/muse/sectors/abstract.py b/src/muse/sectors/abstract.py index 231b444c4..4fe560531 100644 --- a/src/muse/sectors/abstract.py +++ b/src/muse/sectors/abstract.py @@ -29,7 +29,6 @@ def factory(cls, name: str, settings: Any) -> AbstractSector: @abstractmethod def next(self, mca_market: Dataset) -> Dataset: """Advance sector by one time period.""" - pass def __repr__(self): return f"<{self.name.title()} sector - object at {hex(id(self))}>" diff --git a/src/muse/sectors/legacy_sector.py b/src/muse/sectors/legacy_sector.py deleted file mode 100644 index ad61cdc6f..000000000 --- a/src/muse/sectors/legacy_sector.py +++ /dev/null @@ -1,451 +0,0 @@ -"""This module defines the LegacySector class. - -This is needed to interface the new MCA with the old MUSE sectors. It can be deleted -once accessing those sectors is no longer needed. 
-""" - -from collections.abc import Sequence -from dataclasses import dataclass -from itertools import chain -from logging import getLogger -from typing import Any, Union - -import numpy as np -import pandas as pd -from xarray import DataArray, Dataset - -from muse.readers import read_csv_timeslices, read_initial_market -from muse.sectors.abstract import AbstractSector -from muse.sectors.register import register_sector -from muse.timeslices import QuantityType, new_to_old_timeslice - - -@dataclass -class LegacyMarket: - BaseYear: int - EndYear: int - Foresight: np.ndarray - TimeFramework: np.ndarray - YearlyTimeFramework: np.ndarray - NYears: list - GlobalCommoditiesAttributes: np.ndarray - CommoditiesBudget: list - macro_drivers: pd.DataFrame - dfRegions: pd.DataFrame - Regions: np.ndarray - interpolation_mode: str - - -@register_sector(name="legacy") -class LegacySector(AbstractSector): # type: ignore - @classmethod - def factory(cls, name: str, settings: Any, **kwargs) -> "LegacySector": - from pathlib import Path - - from muse_legacy.sectors import SECTORS - - from muse.readers import read_technologies - - sector = getattr(settings.sectors, name) - - settings_dir = sector.userdata_path - sectors_dir = Path(sector.technodata_path).parent - excess = sector.excess - - base_year = settings.time_framework[0] - end_year = settings.time_framework[-1] - - path = settings.global_input_files.macrodrivers - macro_drivers = pd.read_csv(path).sort_index(ascending=True) - - path = settings.global_input_files.regions - regions = pd.read_csv(path).sort_index(ascending=True) - global_commodities = read_technologies( - Path(sector.technodata_path) / f"technodata{name.title()}.csv", - None, - Path(sector.technodata_path) / f"commOUTtechnodata{name.title()}.csv", - Path(sector.technodata_path) / f"commINtechnodata{name.title()}.csv", - commodities=settings.global_input_files.global_commodities, - )[["heat_rate", "unit", "emmission_factor"]] - - interpolation_mode = ( - "Active" if 
settings.interpolation_mode == "linear" else "off" - ) - - market = LegacyMarket( - BaseYear=base_year, - EndYear=end_year, - Foresight=np.array([settings.foresight]), - TimeFramework=settings.time_framework, - YearlyTimeFramework=np.arange(base_year, end_year + 1, 1, dtype=int), - NYears=list(np.diff(settings.time_framework)), - GlobalCommoditiesAttributes=global_commodities.commodity.values, - CommoditiesBudget=settings.carbon_budget_control.commodities, - macro_drivers=macro_drivers, - dfRegions=regions, - Regions=np.array(settings.regions), - interpolation_mode=interpolation_mode, - ) - - timeslices, aggregation = cls.load_timeslices_and_aggregation( - settings.timeslices, settings.sectors - ) - timeslices = { - "prices": timeslices["prices"], - "finest": timeslices["finest"], - "finest aggregation": aggregation, - name: timeslices[name], - } - - initial = ( - read_initial_market( - settings.global_input_files.projections, - base_year_export=getattr( - settings.global_input_files, "base_year_export", None - ), - base_year_import=getattr( - settings.global_input_files, "base_year_import", None - ), - timeslices=timeslices["prices"], - ) - .sel(region=settings.regions) - .interp(year=settings.time_framework, method=settings.interpolation_mode) - ) - commodity_price = initial["prices"] - static_trade = initial["static_trade"] - - old_sector = SECTORS[name]( - market=market, sectors_dir=sectors_dir, settings_dir=settings_dir - ) - - old_sector.SectorCommoditiesOUT = commodities_idx(old_sector, "OUT") - old_sector.SectorCommoditiesIN = commodities_idx(old_sector, "IN") - old_sector.SectorCommoditiesNotENV = commodities_idx(old_sector, "NotENV") - - sector_comm = list( - set(old_sector.SectorCommoditiesOUT).union(old_sector.SectorCommoditiesIN) - ) - - commodities = { - "global": global_commodities, - name: global_commodities.isel(commodity=sector_comm), - } - - msg = f"LegacySector {name} created successfully." 
- getLogger(__name__).info(msg) - return cls( - name, - old_sector, - timeslices, - commodities, - commodity_price, - static_trade, - settings.regions, - settings.time_framework, - "Calibration" if getattr(settings, "calibration", False) else "Iteration", - excess, - "converged", - str(sectors_dir), - str(sector.output_path), - ) - - def __init__( - self, - name: str, - old_sector, - timeslices: dict, - commodities: dict, - commodity_price: DataArray, - static_trade: DataArray, - regions: Sequence, - time_framework: np.ndarray, - mode: str, - excess: Union[int, float], - market_iterative: str, - sectors_dir: str, - output_dir: str, - ): - super().__init__() - self.name = name - """Name of the sector""" - self.old_sector = old_sector - """Legacy sector method to run the calculation""" - assert "prices" in timeslices - assert "finest" in timeslices - assert name in timeslices - self.timeslices = timeslices - """Timeslices for sectors and mca.""" - self.commodities = commodities - """Commodities for each sector, as well as global commodities.""" - self.commodity_price = commodity_price - """Initial price of all the commodities.""" - self.static_trade = static_trade - """Static trade needed for the conversion and supply sectors.""" - self.regions = regions - """Regions taking part in the simulation.""" - self.time_framework = time_framework - """Time framework of the complete simulation.""" - self.mode = mode - """If 'Calibration', the sector runs in calibration mode""" - self.excess = excess - """Allowed excess of capacity.""" - self.market_iterative = market_iterative - """ -----> TODO what's this parameter?""" - self.sectors_dir = sectors_dir - """Sectors directory.""" - self.output_dir = output_dir - """Outputs directory.""" - self.dims = ("commodity", "region", "year", "timeslice") - """Order of the input and output dimensions.""" - self.calibrated = False - """Flag if the sector has gone through the calibration process.""" - - def next(self, market: Dataset) -> 
Dataset: - """Adapter between the old and the new.""" - from muse_legacy.sectors.sector import Demand - - self.commodity_price.loc[{"year": market.year}] = market.prices - - # Consumption in Conversion and Supply sectors depend on the static trade - # TODO This might need to go outside, in the MCA since it will affect all - # sectors, not just the legacy ones. But static trade seems to be always zero, - # so not sure how useful it might be. - if not issubclass(type(self.old_sector), Demand): - consumption = ( - market.consumption - self.static_trade.sel(year=market.year) - ).clip(min=0.0) - else: - consumption = market.consumption.copy() - - converted = self.inputs( - consumption=consumption, supply=market.supply, prices=self.commodity_price - ) - - idx = int(np.argwhere(self.time_framework == market.year.values[0])) - - result = self.runprocessmodule( - converted.consumption, - converted.supplycost, - converted.supply, - (idx, market.year.values[0]), - ) - - result = self.outputs( - consumption=result.consumption, - supply=result.supply, - prices=result.supplycost, - ).sel(year=market.year) - - result["comm_usage"] = self.commodities[self.name].comm_usage - result = result.set_coords("comm_usage") - - # Prices in Demand sectors should not change. 
- if issubclass(type(self.old_sector), Demand): - result["prices"] = self.commodity_price.copy() - - return result - - def runprocessmodule(self, consumption, supplycost, supply, t): - params = [ - consumption, - supplycost, - supply, - new_to_old_timeslice(self.timeslices["prices"]), - new_to_old_timeslice( - self.timeslices["finest"], self.timeslices["finest aggregation"] - ), - t, - self.mode, - ] - - inputs = {"output_dir": self.output_dir, "sectors_dir": self.sectors_dir} - - if self.name == "Power": - if self.mode == "Calibration": - params += [self.market_iterative] - result = self.old_sector.power_calibration(*params, **inputs) - self.mode = "Iteration" - else: - self.mode = "Iteration" - params += [self.old_sector.instance, self.market_iterative, self.excess] - result = self.old_sector.runprocessmodule(*params, **inputs) - else: - params += [self.market_iterative, self.excess] - result = self.old_sector.runprocessmodule(*params, **inputs) - - self.old_sector.report(result, t[1], self.output_dir) - - return result - - @staticmethod - def load_timeslices_and_aggregation(timeslices, sectors) -> tuple[dict, str]: - """Loads all sector timeslices and finds the finest one.""" - timeslices = {"prices": timeslices.rename("prices timeslices")} - finest = timeslices["prices"].copy() - aggregation = "month" - - for sector in sectors.list: - sector_ts = read_csv_timeslices( - getattr(sectors, sector).timeslices_path - ).rename(sector + " timeslice") - timeslices[sector] = sector_ts - - # Now we get the finest - if len(finest) < len(sector_ts): - finest = timeslices[sector] - aggregation = getattr(sectors, sector).agregation_level - elif len(finest) == len(sector_ts) and any( - finest.get_index("timeslice") != sector_ts.get_index("timeslice") - ): - raise ValueError("Timeslice order do not match") - - timeslices["finest"] = finest - timeslices["finest"] = timeslices["finest"].rename("finest timeslice") - - return timeslices, aggregation - - @property - def 
global_commodities(self): - """List of all commodities used by the MCA.""" - return self.commodities["global"].commodity.values - - @property - def sector_commodities(self): - """List of all commodities used by the Sector.""" - return self.commodities[self.name].commodity.values - - @property - def sector_timeslices(self): - """List of all commodities used by the MCA.""" - return self.timeslices[self.name] - - def _to(self, data: np.ndarray, data_ts, ts: pd.MultiIndex, qt: QuantityType): - """From ndarray to dataarray.""" - return ndarray_to_xarray( - years=self.time_framework, - data=data, - ts=ts, - qt=qt, - global_commodities=self.global_commodities, - sector_commodities=self.sector_commodities, - data_ts=data_ts, - dims=self.dims, - regions=self.regions, - ) - - def _from(self, xdata: DataArray, ts: pd.MultiIndex, qt: QuantityType): - """From dataarray to ndarray.""" - return xarray_to_ndarray( - years=self.time_framework, - xdata=xdata, - ts=ts, - qt=qt, - global_commodities=self.global_commodities, - dims=self.dims, - regions=self.regions, - ) - - def outputs( - self, consumption: np.ndarray, prices: np.ndarray, supply: np.ndarray - ) -> Dataset: - """Converts MUSE numpy outputs to xarray.""" - from muse.timeslices import QuantityType - - finest, prices_ts = self.timeslices["finest"], self.timeslices["prices"] - c = self._to(consumption, finest, prices_ts, QuantityType.EXTENSIVE) - s = self._to(supply, self.sector_timeslices, prices_ts, QuantityType.EXTENSIVE) - p = self._to(prices, self.sector_timeslices, prices_ts, QuantityType.INTENSIVE) - return Dataset({"consumption": c, "supply": s, "costs": p}) - - def inputs(self, consumption: DataArray, prices: DataArray, supply: DataArray): - """Converts xarray to MUSE numpy input arrays.""" - from muse_legacy.sectors.sector import Sector as OriginalSector - - MarketVars = OriginalSector.MarketVars - - finest, prices_ts = self.timeslices["finest"], self.timeslices["prices"] - c = self._from(consumption, finest, 
QuantityType.EXTENSIVE) - s = self._from(supply, finest, QuantityType.EXTENSIVE) - p = self._from(prices, prices_ts, QuantityType.INTENSIVE) - - return MarketVars(consumption=c, supply=s, supplycost=p) - - -def ndarray_to_xarray( - years: np.ndarray, - data: np.ndarray, - ts: pd.MultiIndex, - qt: QuantityType, - global_commodities: DataArray, - sector_commodities: DataArray, - data_ts: pd.MultiIndex, - dims: Sequence[str], - regions: Sequence[str], -) -> DataArray: - """From ndarray to dataarray.""" - from collections.abc import Hashable, Mapping - - from muse.timeslices import convert_timeslice - - coords: Mapping[Hashable, Any] = { - "year": years, - "commodity": global_commodities, - "region": regions, - "timeslice": data_ts, - } - result = convert_timeslice(DataArray(data, coords=coords, dims=dims), ts, qt) - assert isinstance(result, DataArray) - return result.sel(commodity=sector_commodities).transpose(*dims) - - -def xarray_to_ndarray( - years: np.ndarray, - xdata: DataArray, - ts: pd.MultiIndex, - qt: QuantityType, - global_commodities: DataArray, - dims: Sequence[str], - regions: Sequence[str], -) -> np.ndarray: - """From dataarray to ndarray.""" - from collections.abc import Hashable, Mapping - - from muse.timeslices import convert_timeslice - - coords: Mapping[Hashable, Any] = { - "year": years, - "commodity": global_commodities, - "region": regions, - "timeslice": ts, - } - warp = np.zeros((len(global_commodities), len(regions), len(years), len(ts))) - result = DataArray(warp, coords=coords, dims=dims) - result.loc[{"year": xdata.year}] = convert_timeslice(xdata, ts, qt).transpose(*dims) - - return result.values - - -def commodities_idx(sector, comm: str) -> Sequence: - """Gets the indices of the commodities involved in the processes of the sector. 
- - Arguments: - sector: The old MUSE sector of interest - comm: Either "OUT", "IN" or "NotENV" - - Returns: - A list with the indexes - """ - comm = { - "OUT": "listIndexCommoditiesOUT", - "IN": "listIndexCommoditiesIN", - "NotENV": "listIndexNotEnvironmental", - }[comm] - - comm_list = chain.from_iterable( - chain.from_iterable( - [[c for c in p.__dict__[comm]] for p in wp.processes + wp.OtherProcesses] - for wp in sector - ) - ) - - return list({item for item in comm_list}) diff --git a/src/muse/sectors/preset_sector.py b/src/muse/sectors/preset_sector.py index 7a35bc2a3..03bf20080 100644 --- a/src/muse/sectors/preset_sector.py +++ b/src/muse/sectors/preset_sector.py @@ -28,17 +28,14 @@ def factory(cls, name: str, settings: Any) -> PresetSector: read_presets, read_regression_parameters, read_timeslice_shares, - read_timeslices, ) from muse.regressions import endogenous_demand - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import TIMESLICE, broadcast_timeslice, distribute_timeslice sector_conf = getattr(settings.sectors, name) presets = Dataset() - timeslice = read_timeslices( - getattr(sector_conf, "timeslice_levels", None) - ).timeslice + timeslice = TIMESLICE.timeslice if getattr(sector_conf, "consumption_path", None) is not None: consumption = read_presets(sector_conf.consumption_path) presets["consumption"] = consumption.assign_coords(timeslice=timeslice) @@ -71,22 +68,14 @@ def factory(cls, name: str, settings: Any) -> PresetSector: if getattr(sector_conf, "timeslice_shares_path", None) is not None: assert isinstance(timeslice, DataArray) - shares = read_timeslice_shares( - sector_conf.timeslice_shares_path, timeslice=timeslice - ) + shares = read_timeslice_shares(sector_conf.timeslice_shares_path) + shares = shares.assign_coords(timeslice=timeslice) assert consumption.commodity.isin(shares.commodity).all() assert consumption.region.isin(shares.region).all() - if "timeslice" in shares.dims: - ts = shares.timeslice - 
shares = drop_timeslice(shares) - consumption = (shares * consumption).assign_coords(timeslice=ts) - else: - consumption = consumption * shares.sel( - region=consumption.region, commodity=consumption.commodity - ) - presets["consumption"] = drop_timeslice(consumption).assign_coords( - timeslice=timeslice - ) + consumption = broadcast_timeslice(consumption) * shares.sel( + region=consumption.region, commodity=consumption.commodity + ) + presets["consumption"] = consumption if getattr(sector_conf, "supply_path", None) is not None: supply = read_presets(sector_conf.supply_path) @@ -121,9 +110,7 @@ def factory(cls, name: str, settings: Any) -> PresetSector: # add timeslice, if missing for component in {"supply", "consumption"}: if "timeslice" not in presets[component].dims: - presets[component] = convert_timeslice( - presets[component], timeslice, QuantityType.EXTENSIVE - ) + presets[component] = distribute_timeslice(presets[component]) comm_usage = (presets.costs > 0).any(set(presets.costs.dims) - {"commodity"}) presets["comm_usage"] = ( @@ -151,21 +138,13 @@ def __init__( def next(self, mca_market: Dataset) -> Dataset: """Advance sector by one time period.""" - from muse.timeslices import QuantityType, convert_timeslice - presets = self.presets.sel(region=mca_market.region) supply = self._interpolate(presets.supply, mca_market.year) consumption = self._interpolate(presets.consumption, mca_market.year) costs = self._interpolate(presets.costs, mca_market.year) - result = convert_timeslice( - Dataset({"supply": supply, "consumption": consumption}), - mca_market.timeslice, - QuantityType.EXTENSIVE, - ) - result["costs"] = drop_timeslice( - convert_timeslice(costs, mca_market.timeslice, QuantityType.INTENSIVE) - ) + result = Dataset({"supply": supply, "consumption": consumption}) + result["costs"] = drop_timeslice(costs) assert isinstance(result, Dataset) return result diff --git a/src/muse/sectors/sector.py b/src/muse/sectors/sector.py index b25a7455e..26e721606 100644 
--- a/src/muse/sectors/sector.py +++ b/src/muse/sectors/sector.py @@ -7,7 +7,6 @@ cast, ) -import pandas as pd import xarray as xr from muse.agents import AbstractAgent @@ -15,6 +14,7 @@ from muse.sectors.abstract import AbstractSector from muse.sectors.register import register_sector from muse.sectors.subsector import Subsector +from muse.timeslices import compress_timeslice, expand_timeslice, get_level @register_sector(name="default") @@ -26,24 +26,22 @@ def factory(cls, name: str, settings: Any) -> Sector: from muse.interactions import factory as interaction_factory from muse.outputs.sector import factory as ofactory from muse.production import factory as pfactory - from muse.readers import read_timeslices from muse.readers.toml import read_technodata from muse.utilities import nametuple_to_dict + # Read sector settings sector_settings = getattr(settings.sectors, name)._asdict() for attribute in ("name", "type", "priority", "path"): sector_settings.pop(attribute, None) - - timeslices = read_timeslices( - sector_settings.pop("timeslice_levels", None) - ).get_index("timeslice") - - technologies = read_technodata(settings, name, settings.time_framework) - if "subsectors" not in sector_settings: raise RuntimeError(f"Missing 'subsectors' section in sector {name}") if len(sector_settings["subsectors"]._asdict()) == 0: raise RuntimeError(f"Empty 'subsectors' section in sector {name}") + + # Read technologies + technologies = read_technodata(settings, name, settings.time_framework) + + # Create subsectors subsectors = [ Subsector.factory( subsec_settings, @@ -51,17 +49,21 @@ def factory(cls, name: str, settings: Any) -> Sector: regions=settings.regions, current_year=int(min(settings.time_framework)), name=subsec_name, + timeslice_level=sector_settings.get("timeslice_level", None), ) for subsec_name, subsec_settings in sector_settings.pop("subsectors") ._asdict() .items() ] + + # Check that subsector commodities are disjoint are_disjoint_commodities = 
sum(len(s.commodities) for s in subsectors) == len( set().union(*(set(s.commodities) for s in subsectors)) # type: ignore ) if not are_disjoint_commodities: raise RuntimeError("Subsector commodities are not disjoint") + # Create outputs outputs = ofactory(*sector_settings.pop("outputs", []), sector_name=name) supply_args = sector_settings.pop( @@ -73,15 +75,21 @@ def factory(cls, name: str, settings: Any) -> Sector: supply_args = nametuple_to_dict(supply_args) supply = pfactory(**supply_args) + # Create interactions interactions = interaction_factory(sector_settings.pop("interactions", None)) - for attr in ("technodata", "commodities_out", "commodities_in"): + # Create sector + for attr in ( + "technodata", + "commodities_out", + "commodities_in", + "technodata_timeslices", + ): sector_settings.pop(attr, None) return cls( name, technologies, subsectors=subsectors, - timeslices=timeslices, supply_prod=supply, outputs=outputs, interactions=interactions, @@ -93,36 +101,42 @@ def __init__( name: str, technologies: xr.Dataset, subsectors: Sequence[Subsector] = [], - timeslices: pd.MultiIndex | None = None, - technodata_timeslices: xr.Dataset = None, interactions: Callable[[Sequence[AbstractAgent]], None] | None = None, interpolation: str = "linear", outputs: Callable | None = None, supply_prod: PRODUCTION_SIGNATURE | None = None, + timeslice_level: str | None = None, ): from muse.interactions import factory as interaction_factory from muse.outputs.sector import factory as ofactory from muse.production import maximum_production + from muse.timeslices import TIMESLICE - self.name: str = name """Name of the sector.""" - self.subsectors: Sequence[Subsector] = list(subsectors) + self.name: str = name + + """Timeslice level for the sector (e.g. 
"month").""" + self.timeslice_level = timeslice_level or get_level(TIMESLICE) + """Subsectors controlled by this object.""" - self.technologies: xr.Dataset = technologies + self.subsectors: Sequence[Subsector] = list(subsectors) + """Parameters describing the sector's technologies.""" - self.timeslices: pd.MultiIndex | None = timeslices - """Timeslice at which this sector operates. + self.technologies: xr.Dataset = technologies + if "timeslice" in self.technologies.dims: + if not get_level(self.technologies) == self.timeslice_level: + raise ValueError( + f"Technodata for {self.name} sector does not match " + "the specified timeslice level for that sector " + f"({self.timeslice_level})" + ) - If None, it will operate using the timeslice of the input market. - """ + """Interpolation method and arguments when computing years.""" self.interpolation: Mapping[str, Any] = { "method": interpolation, "kwargs": {"fill_value": "extrapolate"}, } - """Interpolation method and arguments when computing years.""" - if interactions is None: - interactions = interaction_factory() - self.interactions = interactions + """Interactions between agents. Called right before computing new investments, this function should manage any @@ -139,52 +153,41 @@ def __init__( :py:mod:`muse.interactions` contains MUSE's base interactions """ + self.interactions = interactions or interaction_factory() + + """A function for outputting data for post-mortem analysis.""" self.outputs: Callable = ( cast(Callable, ofactory()) if outputs is None else outputs ) - """A function for outputting data for post-mortem analysis.""" - self.supply_prod = ( - supply_prod if supply_prod is not None else maximum_production - ) - """ Computes production as used to return the supply to the MCA. + + """Computes production as used to return the supply to the MCA. It can be anything registered with :py:func:`@register_production`. 
""" - self.output_data: xr.Dataset + self.supply_prod = supply_prod or maximum_production + """Full supply, consumption and costs data for the most recent year.""" + self.output_data: xr.Dataset @property def forecast(self): """Maximum forecast horizon across agents. - If no agents with a "forecast" attribute are found, defaults to 5. It cannot be - lower than 1 year. + It cannot be lower than 1 year. """ - forecasts = [ - getattr(agent, "forecast") - for agent in self.agents - if hasattr(agent, "forecast") - ] - if len(forecasts) == 0: - return 5 + forecasts = [getattr(agent, "forecast") for agent in self.agents] return max(1, max(forecasts)) def next( self, mca_market: xr.Dataset, - time_period: int | None = None, - current_year: int | None = None, ) -> xr.Dataset: """Advance sector by one time period. Args: mca_market: Market with ``demand``, ``supply``, and ``prices``. - time_period: - Length of the time period in the framework. Defaults to the range of - ``mca_market.year``. - current_year: Current year of the simulation Returns: A market containing the ``supply`` offered by the sector, it's attendant @@ -195,47 +198,32 @@ def next( def group_assets(x: xr.DataArray) -> xr.DataArray: return xr.Dataset(dict(x=x)).groupby("region").sum("asset").x - if time_period is None: - time_period = int(mca_market.year.max() - mca_market.year.min()) - if current_year is None: - current_year = int(mca_market.year.min()) + time_period = int(mca_market.year.max() - mca_market.year.min()) + current_year = int(mca_market.year.min()) getLogger(__name__).info(f"Running {self.name} for year {current_year}") - # > to sector timeslice - market = self.convert_market_timeslice( - mca_market.sel( - commodity=self.technologies.commodity, region=self.technologies.region - ).interp( - year=sorted( - { - current_year, - current_year + time_period, - current_year + self.forecast, - } - ), - **self.interpolation, - ), - self.timeslices, - ) - # > agent interactions + # Agent interactions 
self.interactions(list(self.agents)) - # > investment - years = sorted( - set( - market.year.data.tolist() - + self.capacity.installed.data.tolist() - + self.technologies.year.data.tolist() - ) + + # Convert market to sector timeslicing + mca_market = self.convert_to_sector_timeslicing(mca_market) + + # Select appropriate data from the market + market = mca_market.sel( + commodity=self.technologies.commodity, region=self.technologies.region ) - technologies = self.technologies.interp(year=years, **self.interpolation) + # Investments for subsector in self.subsectors: subsector.invest( - technologies, market, time_period=time_period, current_year=current_year + self.technologies, + market, + time_period=time_period, + current_year=current_year, ) # Full output data - supply, consume, costs = self.market_variables(market, technologies) + supply, consume, costs = self.market_variables(market, self.technologies) self.output_data = xr.Dataset( dict( supply=supply, @@ -286,10 +274,13 @@ def group_assets(x: xr.DataArray) -> xr.DataArray: result = xr.Dataset( dict(supply=supply, consumption=consumption, costs=costs) ) - result = self.convert_market_timeslice(result, mca_market.timeslice) - result["comm_usage"] = technologies.comm_usage.sel(commodity=result.commodity) + result["comm_usage"] = self.technologies.comm_usage.sel( + commodity=result.commodity + ) result.set_coords("comm_usage") - return result + + # Convert result to global timeslicing scheme + return self.convert_to_global_timeslicing(result) def save_outputs(self) -> None: """Calls the outputs function with the current output data.""" @@ -300,32 +291,58 @@ def market_variables(self, market: xr.Dataset, technologies: xr.Dataset) -> Any: from muse.commodities import is_pollutant from muse.costs import annual_levelized_cost_of_energy, supply_cost from muse.quantities import consumption - from muse.timeslices import QuantityType, convert_timeslice from muse.utilities import broadcast_techs years = 
market.year.values capacity = self.capacity.interp(year=years, **self.interpolation) + # Calculate supply supply = self.supply_prod( - market=market, capacity=capacity, technologies=technologies + market=market, + capacity=capacity, + technologies=technologies, + timeslice_level=self.timeslice_level, ) - if "timeslice" in market.prices.dims and "timeslice" not in supply.dims: - supply = convert_timeslice(supply, market.timeslice, QuantityType.EXTENSIVE) - - consume = consumption(technologies, supply, market.prices) + # Calculate consumption + consume = consumption( + technologies, supply, market.prices, timeslice_level=self.timeslice_level + ) + # Calculate commodity prices technodata = cast(xr.Dataset, broadcast_techs(technologies, supply)) costs = supply_cost( supply.where(~is_pollutant(supply.comm_usage), 0), annual_levelized_cost_of_energy( - prices=market.prices.sel(region=supply.region), technologies=technodata + prices=market.prices.sel(region=supply.region), + technologies=technodata, + timeslice_level=self.timeslice_level, ), asset_dim="asset", ) return supply, consume, costs + def convert_to_sector_timeslicing(self, market: xr.Dataset) -> xr.Dataset: + """Converts market data to sector timeslicing.""" + supply = compress_timeslice( + market["supply"], level=self.timeslice_level, operation="sum" + ) + consumption = compress_timeslice( + market["consumption"], level=self.timeslice_level, operation="sum" + ) + prices = compress_timeslice( + market["prices"], level=self.timeslice_level, operation="mean" + ) + return xr.Dataset(dict(supply=supply, consumption=consumption, prices=prices)) + + def convert_to_global_timeslicing(self, market: xr.Dataset) -> xr.Dataset: + """Converts market data to global timeslicing.""" + supply = expand_timeslice(market["supply"], operation="distribute") + consumption = expand_timeslice(market["consumption"], operation="distribute") + costs = expand_timeslice(market["costs"], operation="broadcast") + return 
xr.Dataset(dict(supply=supply, consumption=consumption, costs=costs)) + @property def capacity(self) -> xr.DataArray: """Aggregates capacity across agents. @@ -346,6 +363,8 @@ def capacity(self) -> xr.DataArray: for u in self.agents if "dst_region" not in u.assets.capacity.dims ] + + # Only nontraded assets if not traded: full_list = [ list(nontraded[i].year.values) @@ -360,7 +379,9 @@ def capacity(self) -> xr.DataArray: if "dst_region" not in u.assets.capacity.dims ] return reduce_assets(nontraded) - if not nontraded: + + # Only traded assets + elif not nontraded: full_list = [ list(traded[i].year.values) for i in range(len(traded)) @@ -374,44 +395,21 @@ def capacity(self) -> xr.DataArray: if "dst_region" in u.assets.capacity.dims ] return reduce_assets(traded) - traded_results = reduce_assets(traded) - nontraded_results = reduce_assets(nontraded) - return reduce_assets( - [ - traded_results, - nontraded_results - * (nontraded_results.region == traded_results.dst_region), - ] - ) + + # Both traded and nontraded assets + else: + traded_results = reduce_assets(traded) + nontraded_results = reduce_assets(nontraded) + return reduce_assets( + [ + traded_results, + nontraded_results + * (nontraded_results.region == traded_results.dst_region), + ] + ) @property def agents(self) -> Iterator[AbstractAgent]: """Iterator over all agents in the sector.""" for subsector in self.subsectors: yield from subsector.agents - - @staticmethod - def convert_market_timeslice( - market: xr.Dataset, - timeslice: pd.MultiIndex, - intensive: str | tuple[str] = "prices", - ) -> xr.Dataset: - """Converts market from one to another timeslice.""" - from muse.timeslices import QuantityType, convert_timeslice - - if isinstance(intensive, str): - intensive = (intensive,) - - timesliced = {d for d in market.data_vars if "timeslice" in market[d].dims} - intensives = convert_timeslice( - market[list(timesliced.intersection(intensive))], - timeslice, - QuantityType.INTENSIVE, - ) - extensives = 
convert_timeslice( - market[list(timesliced.difference(intensives.data_vars))], - timeslice, - QuantityType.EXTENSIVE, - ) - others = market[list(set(market.data_vars).difference(timesliced))] - return xr.merge([intensives, extensives, others]) diff --git a/src/muse/sectors/subsector.py b/src/muse/sectors/subsector.py index 26ba1cb27..dc75222c7 100644 --- a/src/muse/sectors/subsector.py +++ b/src/muse/sectors/subsector.py @@ -1,10 +1,9 @@ from __future__ import annotations -from collections.abc import Hashable, MutableMapping, Sequence +from collections.abc import Sequence from typing import ( Any, Callable, - cast, ) import numpy as np @@ -27,6 +26,7 @@ def __init__( name: str = "subsector", forecast: int = 5, expand_market_prices: bool = False, + timeslice_level: str | None = None, ): from muse import constraints as cs from muse import demand_share as ds @@ -40,6 +40,7 @@ def __init__( self.forecast = forecast self.name = name self.expand_market_prices = expand_market_prices + self.timeslice_level = timeslice_level """Whether to expand prices to include destination region. 
If ``True``, the input market prices are expanded of the missing "dst_region" @@ -51,68 +52,40 @@ def invest( self, technologies: xr.Dataset, market: xr.Dataset, - time_period: int = 5, - current_year: int | None = None, + time_period: int, + current_year: int, ) -> None: - if current_year is None: - current_year = market.year.min() + # Expand prices to include destination region (for trade models) if self.expand_market_prices: market = market.copy() market["prices"] = drop_timeslice( np.maximum(market.prices, market.prices.rename(region="dst_region")) ) + # Agent housekeeping for agent in self.agents: agent.asset_housekeeping() - lp_problem = self.aggregate_lp( - technologies, market, time_period, current_year=current_year - ) - if lp_problem is None: - return - - years = technologies.year - techs = technologies.interp(year=years) - techs = techs.sel(year=current_year + time_period) - - solution = self.investment( - search=lp_problem[0], technologies=techs, constraints=lp_problem[1] - ) - - self.assign_back_to_agents(technologies, solution, current_year, time_period) - - def assign_back_to_agents( - self, - technologies: xr.Dataset, - solution: xr.DataArray, - current_year: int, - time_period: int, - ): - agents = {u.uuid: u for u in self.agents} - - for uuid, assets in solution.groupby("agent"): - agents[uuid].add_investments( - technologies, assets, current_year, time_period - ) + # Perform the investments + self.aggregate_lp(technologies, market, time_period, current_year=current_year) def aggregate_lp( self, technologies: xr.Dataset, market: xr.Dataset, - time_period: int = 5, - current_year: int | None = None, - ) -> tuple[xr.Dataset, Sequence[xr.Dataset]] | None: + time_period, + current_year, + ) -> None: from muse.utilities import agent_concatenation, reduce_assets - if current_year is None: - current_year = market.year.min() - + # Split demand across agents demands = self.demand_share( self.agents, market, technologies, current_year=current_year, 
forecast=self.forecast, + timeslice_level=self.timeslice_level, ) if "dst_region" in demands.dims: @@ -122,42 +95,27 @@ def aggregate_lp( dimension. """ raise ValueError(msg) - agent_market = market.copy() + + # Concatenate assets assets = agent_concatenation( {agent.uuid: agent.assets for agent in self.agents} ) + + # Calculate existing capacity + agent_market = market.copy() agent_market["capacity"] = ( reduce_assets(assets.capacity, coords=("region", "technology")) .interp(year=market.year, method="linear", kwargs={"fill_value": 0.0}) .swap_dims(dict(asset="technology")) ) - agent_lps: MutableMapping[Hashable, xr.Dataset] = {} + # Increment each agent (perform investments) for agent in self.agents: if "agent" in demands.coords: share = demands.sel(asset=demands.agent == agent.uuid) else: share = demands - result = agent.next( - technologies, agent_market, share, time_period=time_period - ) - if result is not None: - agent_lps[agent.uuid] = result - - if len(agent_lps) == 0: - return None - - lps = cast(xr.Dataset, agent_concatenation(agent_lps, dim="agent")) - coords = {"agent", "technology", "region"}.intersection(assets.asset.coords) - constraints = self.constraints( - demand=demands, - assets=reduce_assets(assets, coords=coords).set_coords(coords), - search_space=lps.search_space, - market=market, - technologies=technologies, - year=current_year, - ) - return lps, constraints + agent.next(technologies, agent_market, share, time_period=time_period) @classmethod def factory( @@ -167,7 +125,10 @@ def factory( regions: Sequence[str] | None = None, current_year: int | None = None, name: str = "subsector", + timeslice_level: str | None = None, ) -> Subsector: + from logging import getLogger + from muse import constraints as cs from muse import demand_share as ds from muse import investments as iv @@ -180,6 +141,13 @@ def factory( msg = "Invalid parameter asset_threshhold. Did you mean asset_threshold?" 
raise ValueError(msg) + # Raise warning if lpsolver is not specified (PR #587) + if not hasattr(settings, "lpsolver"): + msg = ( + f"lpsolver not specified for subsector '{name}'. Defaulting to 'scipy'" + ) + getLogger(__name__).warning(msg) + agents = agents_factory( settings.agents, settings.existing_capacity, @@ -188,9 +156,10 @@ def factory( year=current_year or int(technologies.year.min()), asset_threshold=getattr(settings, "asset_threshold", 1e-12), # only used by self-investing agents - investment=getattr(settings, "lpsolver", "adhoc"), + investment=getattr(settings, "lpsolver", "scipy"), forecast=getattr(settings, "forecast", 5), constraints=getattr(settings, "constraints", ()), + timeslice_level=timeslice_level, ) # technologies can have nans where a commodity # does not apply to a technology at all @@ -244,6 +213,7 @@ def factory( forecast=forecast, name=name, expand_market_prices=expand_market_prices, + timeslice_level=timeslice_level, ) diff --git a/src/muse/timeslices.py b/src/muse/timeslices.py index 96b8f2fc2..e69c4cdd5 100644 --- a/src/muse/timeslices.py +++ b/src/muse/timeslices.py @@ -1,599 +1,279 @@ """Timeslice utility functions.""" __all__ = [ - "aggregate_transforms", - "convert_timeslice", - "reference_timeslice", - "represent_hours", + "broadcast_timeslice", + "compress_timeslice", + "distribute_timeslice", + "drop_timeslice", + "expand_timeslice", + "get_level", + "read_timeslices", "setup_module", - "timeslice_projector", + "sort_timeslices", + "timeslice_max", ] from collections.abc import Mapping, Sequence -from enum import Enum, unique from typing import Optional, Union -import xarray as xr -from numpy import ndarray -from pandas import MultiIndex -from xarray import DataArray, Dataset - -from muse.readers import kebab_to_camel +import numpy as np +import pandas as pd +from xarray import DataArray TIMESLICE: DataArray = None # type: ignore -"""Array with the finest timeslice.""" -TRANSFORMS: dict[tuple, ndarray] = None # type: ignore 
-"""Transforms from each aggregate to the finest timeslice.""" - -DEFAULT_TIMESLICE_DESCRIPTION = """ - [timeslices] - winter.weekday.night = 396 - winter.weekday.morning = 396 - winter.weekday.afternoon = 264 - winter.weekday.early-peak = 66 - winter.weekday.late-peak = 66 - winter.weekday.evening = 396 - winter.weekend.night = 156 - winter.weekend.morning = 156 - winter.weekend.afternoon = 156 - winter.weekend.evening = 156 - spring-autumn.weekday.night = 792 - spring-autumn.weekday.morning = 792 - spring-autumn.weekday.afternoon = 528 - spring-autumn.weekday.early-peak = 132 - spring-autumn.weekday.late-peak = 132 - spring-autumn.weekday.evening = 792 - spring-autumn.weekend.night = 300 - spring-autumn.weekend.morning = 300 - spring-autumn.weekend.afternoon = 300 - spring-autumn.weekend.evening = 300 - summer.weekday.night = 396 - summer.weekday.morning = 396 - summer.weekday.afternoon = 264 - summer.weekday.early-peak = 66 - summer.weekday.late-peak = 66 - summer.weekday.evening = 396 - summer.weekend.night = 150 - summer.weekend.morning = 150 - summer.weekend.afternoon = 150 - summer.weekend.evening = 150 - level_names = ["month", "day", "hour"] - - [timeslices.aggregates] - all-day = [ - "night", "morning", "afternoon", "early-peak", "late-peak", "evening", "night" - ] - all-week = ["weekday", "weekend"] - all-year = ["winter", "summer", "spring-autumn"] - """ -def reference_timeslice( +def read_timeslices( settings: Union[Mapping, str], level_names: Sequence[str] = ("month", "day", "hour"), - name: str = "timeslice", ) -> DataArray: - '''Reads reference timeslice from toml like input. - - Arguments: - settings: A dictionary of nested dictionaries or a string that toml will - interpret as such. The nesting specifies different levels of the timeslice. - If a dictionary and it contains "timeslices" key, then the associated value - is used as the root dictionary. 
Ultimately, the most nested values should be - relative weights for each slice in the timeslice, e.g. the corresponding - number of hours. - level_names: Hints indicating the names of each level. Can also be given a - "level_names" key in ``settings``. - name: name of the reference array - - Return: - A ``DataArray`` with dimension *timeslice* and values representing the relative - weight of each timeslice. - - Example: - >>> from muse.timeslices import reference_timeslice - >>> reference_timeslice( - ... """ - ... [timeslices] - ... spring.weekday = 5 - ... spring.weekend = 2 - ... autumn.weekday = 5 - ... autumn.weekend = 2 - ... winter.weekday = 5 - ... winter.weekend = 2 - ... summer.weekday = 5 - ... summer.weekend = 2 - ... level_names = ["season", "week"] - ... """ - ... ) # doctest: +SKIP - Size: 32B - array([5, 2, 5, 2, 5, 2, 5, 2]) - Coordinates: - * timeslice (timeslice) object 64B MultiIndex - * season (timeslice) object 64B 'spring' 'spring' ... 'summer' 'summer' - * week (timeslice) object 64B 'weekday' 'weekend' ... 'weekend' - ''' from functools import reduce + from logging import getLogger from toml import loads + # Read timeslice settings if isinstance(settings, str): settings = loads(settings) settings = dict(**settings.get("timeslices", settings)) + + # Legacy: warn user about deprecation of "aggregates" feature (#550) + if "aggregates" in settings: + msg = ( + "Timeslice aggregation has been deprecated since v1.3.0. Please see the " + "release notes for that version for more information." 
+ ) + getLogger(__name__).warning(msg) + settings.pop("aggregates") + + # Extract level names if "level_names" in settings: level_names = settings.pop("level_names") - settings.pop("aggregates", {}) - # figures out levels - levels: list[tuple] = [(level,) for level in settings] + # Extract timeslice levels and lengths ts = list(settings.values()) + levels: list[tuple] = [(level,) for level in settings] while all(isinstance(v, Mapping) for v in ts): levels = [(*previous, b) for previous, a in zip(levels, ts) for b in a] ts = reduce(list.__add__, (list(u.values()) for u in ts), []) + # Prepare multiindex nln = min(len(levels[0]), len(level_names)) level_names = ( list(level_names[:nln]) + [str(i) for i in range(len(levels[0]))][nln:] ) - indices = MultiIndex.from_tuples(levels, names=level_names) + indices = pd.MultiIndex.from_tuples(levels, names=level_names) + # Make sure names from different levels don't overlap if any( reduce(set.union, indices.levels[:i], set()).intersection(indices.levels[i]) for i in range(1, indices.nlevels) ): raise ValueError("Names from different levels should not overlap.") - return DataArray(ts, coords={"timeslice": indices}, dims=name) - - -def aggregate_transforms( - settings: Optional[Union[Mapping, str]] = None, - timeslice: Optional[DataArray] = None, -) -> dict[tuple, ndarray]: - '''Creates dictionary of transforms for aggregate levels. - - The transforms are used to create the projectors towards the finest timeslice. - - Arguments: - timeslice: a ``DataArray`` with the timeslice dimension. - settings: A dictionary mapping the name of an aggregate with the values it - aggregates, or a string that toml will parse as such. If not given, only the - unit transforms are returned. - - Return: - A dictionary of transforms for each possible slice to it's corresponding finest - timeslices. - - Example: - >>> toml = """ - ... [timeslices] - ... spring.weekday = 5 - ... spring.weekend = 2 - ... autumn.weekday = 5 - ... 
autumn.weekend = 2 - ... winter.weekday = 5 - ... winter.weekend = 2 - ... summer.weekday = 5 - ... summer.weekend = 2 - ... - ... [timeslices.aggregates] - ... spautumn = ["spring", "autumn"] - ... week = ["weekday", "weekend"] - ... """ - >>> from muse.timeslices import reference_timeslice, aggregate_transforms - >>> ref = reference_timeslice(toml) - >>> transforms = aggregate_transforms(toml, ref) - >>> transforms[("spring", "weekend")] - array([0, 1, 0, 0, 0, 0, 0, 0]) - >>> transforms[("spautumn", "weekday")] - array([1, 0, 1, 0, 0, 0, 0, 0]) - >>> transforms[("autumn", "week")].T - array([0, 0, 1, 1, 0, 0, 0, 0]) - >>> transforms[("spautumn", "week")].T - array([1, 1, 1, 1, 0, 0, 0, 0]) - ''' - from itertools import product - - from numpy import identity, sum - from toml import loads - - if timeslice is None: - timeslice = TIMESLICE - if settings is None: - settings = {} - elif isinstance(settings, str): - settings = loads(settings) - - # get timeslice dimension - Id = identity(len(timeslice), dtype=int) - indices = timeslice.get_index("timeslice") - unitvecs: dict[tuple, ndarray] = {index: Id[i] for (i, index) in enumerate(indices)} - if "timeslices" in settings or "aggregates" in settings: - settings = settings.get("timeslices", settings).get("aggregates", {}) - assert isinstance(settings, Mapping) - - assert set(settings).intersection(unitvecs) == set() - levels = [list(level) for level in indices.levels] - for name, equivalent in settings.items(): - matching_levels = [ - set(level).issuperset(equivalent) for level in indices.levels - ] - if sum(matching_levels) == 0: - raise ValueError(f"Could not find matching level for {equivalent}") - elif sum(matching_levels) > 1: - raise ValueError(f"Found more than one matching level for {equivalent}") - level = matching_levels.index(True) - levels[level].append(name) - - result: dict[tuple, ndarray] = {} - for index in set(product(*levels)).difference(unitvecs): - if not any(level in settings for level in index): - 
continue - agglevels = set(product(*(settings.get(level, [level]) for level in index))) - result[index] = sum( - [unitvecs[agg] for agg in unitvecs if agg in agglevels], axis=0 - ) - result.update(unitvecs) - return result + # Create DataArray + return DataArray(ts, coords={"timeslice": indices}, dims="timeslice") def setup_module(settings: Union[str, Mapping]): """Sets up module singletons.""" global TIMESLICE - global TRANSFORMS - TIMESLICE = reference_timeslice(settings) - TRANSFORMS = aggregate_transforms(settings, TIMESLICE) + TIMESLICE = read_timeslices(settings) -def timeslice_projector( - x: Union[DataArray, MultiIndex], - finest: Optional[DataArray] = None, - transforms: Optional[dict[tuple, ndarray]] = None, +def broadcast_timeslice( + data: DataArray, ts: Optional[DataArray] = None, level: Optional[str] = None ) -> DataArray: - '''Project time-slice to standardized finest time-slices. - - Returns a matrix from the input timeslice ``x`` to the ``finest`` timeslice, using - the input ``transforms``. The latter are a set of transforms that map indices from - one timeslice to indices in another. - - Example: - Lets define the following timeslices and aggregates: - - >>> toml = """ - ... ["timeslices"] - ... winter.weekday.day = 5 - ... winter.weekday.night = 5 - ... winter.weekend.day = 2 - ... winter.weekend.night = 2 - ... winter.weekend.dusk = 1 - ... summer.weekday.day = 5 - ... summer.weekday.night = 5 - ... summer.weekend.day = 2 - ... summer.weekend.night = 2 - ... summer.weekend.dusk = 1 - ... level_names = ["semester", "week", "day"] - ... aggregates.allday = ["day", "night"] - ... """ - >>> from muse.timeslices import ( - ... reference_timeslice, aggregate_transforms - ... ) - >>> ref = reference_timeslice(toml) - >>> transforms = aggregate_transforms(toml, ref) - >>> from pandas import MultiIndex - >>> input_ts = DataArray( - ... [1, 2, 3], - ... coords={ - ... "timeslice": MultiIndex.from_tuples( - ... [ - ... 
("winter", "weekday", "allday"), - ... ("winter", "weekend", "dusk"), - ... ("summer", "weekend", "night"), - ... ], - ... names=ref.get_index("timeslice").names, - ... ), - ... }, - ... dims="timeslice" - ... ) - >>> input_ts # doctest: +SKIP - Size: 12B - array([1, 2, 3]) - Coordinates: - * timeslice (timeslice) object 24B MultiIndex - * semester (timeslice) object 24B 'winter' 'winter' 'summer' - * week (timeslice) object 24B 'weekday' 'weekend' 'weekend' - * day (timeslice) object 24B 'allday' 'dusk' 'night' - - The input timeslice does not have to be complete. In any case, we can now - compute a transform, i.e. a matrix that will take this timeslice and transform - it to the equivalent times in the finest timeslice: - - >>> from muse.timeslices import timeslice_projector - >>> timeslice_projector(input_ts, ref, transforms) # doctest: +SKIP - Size: 120B - array([[1, 0, 0], - [1, 0, 0], - [0, 0, 0], - [0, 0, 0], - [0, 1, 0], - [0, 0, 0], - [0, 0, 0], - [0, 0, 0], - [0, 0, 1], - [0, 0, 0]]) - Coordinates: - * finest_timeslice (finest_timeslice) object 80B MultiIndex - * finest_semester (finest_timeslice) object 80B 'winter' ... 'summer' - * finest_week (finest_timeslice) object 80B 'weekday' ... 'weekend' - * finest_day (finest_timeslice) object 80B 'day' 'night' ... 'dusk' - * timeslice (timeslice) object 24B MultiIndex - * semester (timeslice) object 24B 'winter' 'winter' 'summer' - * week (timeslice) object 24B 'weekday' 'weekend' 'weekend' - * day (timeslice) object 24B 'allday' 'dusk' 'night' - - It is possible to give as input an array which does not have a timeslice of its - own: - - >>> nots = DataArray([5.0, 1.0, 2.0], dims="a", coords={'a': [1, 2, 3]}) - >>> timeslice_projector(nots, ref, transforms).T # doctest: +SKIP - Size: 40B - array([[1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]) - Coordinates: - * finest_timeslice (finest_timeslice) object 80B MultiIndex - * finest_semester (finest_timeslice) object 80B 'winter' ... 
'summer' - * finest_week (finest_timeslice) object 80B 'weekday' ... 'weekend' - * finest_day (finest_timeslice) object 80B 'day' 'night' ... 'dusk' - Dimensions without coordinates: timeslice - ''' - from numpy import concatenate, ones_like - from xarray import DataArray - - if finest is None: - global TIMESLICE - finest = TIMESLICE - if transforms is None: - global TRANSFORMS - transforms = TRANSFORMS - - index = finest.get_index("timeslice") - index = index.set_names(f"finest_{u}" for u in index.names) - - if isinstance(x, MultiIndex): - timeslices = x - elif "timeslice" in x.dims: - timeslices = x.get_index("timeslice") - else: - return DataArray( - ones_like(finest, dtype=int)[:, None], - coords={"finest_timeslice": index}, - dims=("finest_timeslice", "timeslice"), + """Convert a non-timesliced array to a timesliced array by broadcasting. + + If data is already timesliced in the appropriate scheme, it will be returned + unchanged. + + Args: + data: Array to broadcast. + ts: Dataarray with timeslice lengths. If None, defaults to the global timeslice. + level: Level to broadcast to. If None, use the finest level of ts. + + """ + from xarray import Coordinates + + if ts is None: + ts = TIMESLICE + + if level is not None: + ts = compress_timeslice(ts, ts=ts, level=level, operation="sum") + + # If data already has timeslices, check that it matches the reference timeslice. + if "timeslice" in data.dims: + if data.timeslice.reset_coords(drop=True).equals(ts.timeslice): + return data + raise ValueError( + "Data is already timesliced, but does not match the reference." 
) - return DataArray( - concatenate([transforms[index][:, None] for index in timeslices], axis=1), - coords={"finest_timeslice": index, "timeslice": timeslices}, - dims=("finest_timeslice", "timeslice"), - name="projector", + mindex_coords = Coordinates.from_pandas_multiindex(ts.timeslice, "timeslice") + broadcasted = data.expand_dims(timeslice=ts["timeslice"]).assign_coords( + mindex_coords ) + return broadcasted + +def distribute_timeslice( + data: DataArray, ts: Optional[DataArray] = None, level=None +) -> DataArray: + """Convert a non-timesliced array to a timesliced array by distribution. -@unique -class QuantityType(Enum): - """Underlying transformation when performing time-slice conversion. + If data is already timesliced in the appropriate scheme, it will be returned + unchanged. - The meaning of a quantity vs the time-slice can be different: + Args: + data: Array to distribute. + ts: Dataarray with timeslice lengths. If None, defaults to the global timeslice. + level: Level to distribute to. If None, use the finest level of ts. - - intensive: when extending the period of interest, quantities should be - added together. For instance the number of hours should be summed across - months. - - extensive: when extending the period of interest, quantities should be - broadcasted. For instance when extending a price from a one week period to - a two week period, the price should remain the same. Going in the opposite - direction (reducing the length of the time period), quantities should be - averaged. """ + if ts is None: + ts = TIMESLICE + + if level is not None: + ts = compress_timeslice(ts, ts=ts, level=level, operation="sum") + + # If data already has timeslices, check that it matches the reference timeslice. + if "timeslice" in data.dims: + if data.timeslice.reset_coords(drop=True).equals(ts.timeslice): + return data + raise ValueError( + "Data is already timesliced, but does not match the reference." 
+ ) + + broadcasted = broadcast_timeslice(data, ts=ts) + timeslice_fractions = ts / broadcast_timeslice(ts.sum(), ts=ts) + return broadcasted * timeslice_fractions + + +def compress_timeslice( + data: DataArray, + ts: Optional[DataArray] = None, + level: Optional[str] = None, + operation: str = "sum", +) -> DataArray: + """Convert a fully timesliced array to a coarser level. + + The operation can be either 'sum', or 'mean': + - sum: sum values at each compressed timeslice level + - mean: take a weighted average of values at each compressed timeslice level, + according to timeslice length + + Args: + data: Timesliced array to compress. Must have the same timeslicing as ts. + ts: Dataarray with timeslice lengths. If None, defaults to the global timeslice. + level: Level to compress to. If None, don't compress. + operation: Operation to perform ("sum" or "mean"). Defaults to "sum". - INTENSIVE = "intensive" - EXTENSIVE = "extensive" - - -def convert_timeslice( - x: Union[DataArray, Dataset], - ts: Union[DataArray, Dataset, MultiIndex], - quantity: Union[QuantityType, str] = QuantityType.EXTENSIVE, - finest: Optional[DataArray] = None, - transforms: Optional[dict[tuple, ndarray]] = None, -) -> Union[DataArray, Dataset]: - '''Adjusts the timeslice of x to match that of ts. - - The conversion can be done in on of two ways, depending on whether the - quantity is extensive or intensive. See `QuantityType`. - - Example: - Lets define three timeslices from finest, to fine, to rough: - - >>> toml = """ - ... ["timeslices"] - ... winter.weekday.day = 5 - ... winter.weekday.night = 5 - ... winter.weekend.day = 2 - ... winter.weekend.night = 2 - ... summer.weekday.day = 5 - ... summer.weekday.night = 5 - ... summer.weekend.day = 2 - ... summer.weekend.night = 2 - ... level_names = ["semester", "week", "day"] - ... aggregates.allday = ["day", "night"] - ... aggregates.allweek = ["weekend", "weekday"] - ... aggregates.allyear = ["winter", "summer"] - ... 
""" - >>> from muse.timeslices import setup_module - >>> from muse.readers import read_timeslices - >>> setup_module(toml) - >>> finest_ts = read_timeslices() - >>> fine_ts = read_timeslices(dict(week=["allweek"])) - >>> rough_ts = read_timeslices(dict(semester=["allyear"], day=["allday"])) - - Lets also define to other data-arrays to demonstrate how we can play with - dimensions: - - >>> from numpy import array - >>> x = DataArray( - ... [5, 2, 3], - ... coords={'a': array([1, 2, 3], dtype="int64")}, - ... dims='a' - ... ) - >>> y = DataArray([1, 1, 2], coords={'b': ["d", "e", "f"]}, dims='b') - - We can now easily convert arrays with different dimensions. First, lets check - conversion from an array with no timeslices: - - >>> from xarray import ones_like - >>> from muse.timeslices import convert_timeslice, QuantityType - >>> z = convert_timeslice(x, finest_ts, QuantityType.EXTENSIVE) - >>> z.round(6) - Size: 192B - array([[0.892857, 0.357143, 0.535714], - [0.892857, 0.357143, 0.535714], - [0.357143, 0.142857, 0.214286], - [0.357143, 0.142857, 0.214286], - [0.892857, 0.357143, 0.535714], - [0.892857, 0.357143, 0.535714], - [0.357143, 0.142857, 0.214286], - [0.357143, 0.142857, 0.214286]]) - Coordinates: - * timeslice (timeslice) object 64B MultiIndex - * semester (timeslice) object 64B 'winter' 'winter' ... 'summer' 'summer' - * week (timeslice) object 64B 'weekday' 'weekday' ... 'weekend' - * day (timeslice) object 64B 'day' 'night' 'day' ... 'day' 'night' - * a (a) int64 24B 1 2 3 - >>> z.sum("timeslice") - Size: 24B - array([5., 2., 3.]) - Coordinates: - * a (a) int64 24B 1 2 3 - - As expected, the sum over timeslices recovers the original array. 
- - In the case of an intensive quantity without a timeslice dimension, the - operation does not do anything: - - >>> convert_timeslice([1, 2], rough_ts, QuantityType.INTENSIVE) - [1, 2] - - More interesting is the conversion between different timeslices: - - >>> from xarray import zeros_like - >>> zfine = x + y + zeros_like(fine_ts.timeslice, dtype=int) - >>> zrough = convert_timeslice(zfine, rough_ts) - >>> zrough.round(6) - Size: 144B - array([[[17.142857, 17.142857, 20. ], - [ 8.571429, 8.571429, 11.428571], - [11.428571, 11.428571, 14.285714]], - - [[ 6.857143, 6.857143, 8. ], - [ 3.428571, 3.428571, 4.571429], - [ 4.571429, 4.571429, 5.714286]]]) - Coordinates: - * timeslice (timeslice) object 16B MultiIndex - * semester (timeslice) object 16B 'allyear' 'allyear' - * week (timeslice) object 16B 'weekday' 'weekend' - * day (timeslice) object 16B 'allday' 'allday' - * a (a) int64 24B 1 2 3 - * b (b) >> from numpy import all - >>> all(zfine.sum("timeslice").round(6) == zrough.sum("timeslice").round(6)) - Size: 1B - array(True) - - Or that the ratio of weekdays to weekends makes sense: - >>> weekdays = ( - ... zrough - ... .unstack("timeslice") - ... .sel(week="weekday") - ... .stack(timeslice=["semester", "day"]) - ... .squeeze() - ... ) - >>> weekend = ( - ... zrough - ... .unstack("timeslice") - ... .sel(week="weekend") - ... .stack(timeslice=["semester", "day"]) - ... .squeeze() - ... 
) - >>> bool(all((weekend * 5).round(6) == (weekdays * 2).round(6))) - True - ''' - if finest is None: - global TIMESLICE - finest = TIMESLICE - if transforms is None: - global TRANSFORMS - transforms = TRANSFORMS - if hasattr(ts, "timeslice"): - ts = ts.timeslice - has_ts = "timeslice" in getattr(x, "dims", ()) - same_ts = has_ts and len(ts) == len(x.timeslice) and x.timeslice.equals(ts) - if same_ts or ((not has_ts) and quantity == QuantityType.INTENSIVE): - return x - quantity = QuantityType(quantity) - proj0 = timeslice_projector(x, finest=finest, transforms=transforms) - proj1 = timeslice_projector(ts, finest=finest, transforms=transforms) - if quantity is QuantityType.EXTENSIVE: - finest = finest.rename(timeslice="finest_timeslice") - index = finest.get_index("finest_timeslice") - index = index.set_names(f"finest_{u}" for u in index.names) - mindex_coords = xr.Coordinates.from_pandas_multiindex(index, "finest_timeslice") - finest = finest.drop_vars(list(finest.coords)).assign_coords(mindex_coords) - proj0 = proj0 * finest - proj0 = proj0 / proj0.sum("finest_timeslice") - elif quantity is QuantityType.INTENSIVE: - proj1 = proj1 / proj1.sum("finest_timeslice") - - new_names = {"timeslice": "final_ts"} | { - c: f"{c}_ts" for c in proj1.timeslice.coords if c != "timeslice" - } - P = (proj1.rename(**new_names) * proj0).sum("finest_timeslice") - - final_names = {"final_ts": "timeslice"} | { - c: c.replace("_ts", "") for c in P.final_ts.coords if c != "final_ts" - } - return (P * x).sum("timeslice").rename(**final_names) - - -def new_to_old_timeslice(ts: DataArray, ag_level="Month") -> dict: - """Transforms timeslices defined as DataArray to a pandas dataframe. - - This function is used in the LegacySector class to adapt the new MCA timeslices to - the format required by the old sectors. 
""" - length = len(ts.month.values) - converted_ts = { - "Month": [kebab_to_camel(w) for w in ts.month.values], - "Day": [kebab_to_camel(w) for w in ts.day.values], - "Hour": [kebab_to_camel(w) for w in ts.hour.values], - "RepresentHours": list(ts.represent_hours.values.astype(float)), - "SN": list(range(1, length + 1)), - "AgLevel": [ag_level] * length, - } - return converted_ts - - -def represent_hours( - timeslices: DataArray, nhours: Union[int, float] = 8765.82 + if ts is None: + ts = TIMESLICE + + # Raise error if data is not timesliced appropriately + if "timeslice" not in data.dims: + raise ValueError("Data must have a 'timeslice' dimension.") + if not data.timeslice.reset_coords(drop=True).equals(ts.timeslice): + raise ValueError("Data has incompatible timeslicing with reference.") + + # If level is not specified, don't compress + if level is None: + return data + + # level must be a valid timeslice level + x_levels = data.timeslice.to_index().names + if level not in x_levels: + raise ValueError(f"Unknown level: {level}. Must be one of {x_levels}.") + + # Return data unchanged if already at the desired level + if get_level(data) == level: + return data + + # Prepare mask + idx = x_levels.index(level) + kept_levels, compressed_levels = x_levels[: idx + 1], x_levels[idx + 1 :] + mask = ts.unstack(dim="timeslice") + if operation == "sum": + mask = mask.where(np.isnan(mask), 1) + elif operation == "mean": + mask = mask / mask.sum(compressed_levels) + else: + raise ValueError(f"Unknown operation: {operation}. Must be 'sum' or 'mean'.") + + # Perform the operation + result = ( + (data.unstack(dim="timeslice") * mask) + .sum(compressed_levels) + .stack(timeslice=kept_levels) + ) + return sort_timeslices(result, ts) + + +def expand_timeslice( + data: DataArray, ts: Optional[DataArray] = None, operation: str = "distribute" ) -> DataArray: - """Number of hours per timeslice. + """Convert a timesliced array to a finer level. 
+ + The operation can be either 'distribute', or 'broadcast' + - distribute: distribute values over the new timeslice level(s) according to + timeslice lengths, such that the sum of the output over all timeslices is equal + to the sum of the input + - broadcast: broadcast values across over the new timeslice level(s) + + Args: + data: Timesliced array to expand. + ts: Dataarray with timeslice lengths. If None, defaults to the global timeslice. + operation: Operation to perform ("distribute" or "broadcast"). + Defaults to "distribute". - Arguments: - timeslices: The timeslice for which to compute the number of hours - nhours: The total number of hours represented in the timeslice. Defaults to the - average number of hours in year. """ - return convert_timeslice(DataArray([nhours]), timeslices).squeeze() + if ts is None: + ts = TIMESLICE + + # Raise error if data is not timesliced + if "timeslice" not in data.dims: + raise ValueError("Data must have a 'timeslice' dimension.") + + # Get level names + ts_levels = ts.timeslice.to_index().names + x_levels = data.timeslice.to_index().names + + # Raise error if x_levels is not a subset of ts_levels + if not set(x_levels).issubset(ts_levels): + raise ValueError( + "Data has incompatible timeslicing with reference. " + f"Timeslice levels of data ({x_levels}) must be a subset of ts " + f"({ts_levels})." + ) + + # Return data unchanged if already at the desired level + finest_level = get_level(ts) + current_level = get_level(data) + if current_level == finest_level: + return data + + # Prepare mask + mask = ts.unstack(dim="timeslice") + if operation == "broadcast": + mask = mask.where(np.isnan(mask), 1) + elif operation == "distribute": + mask = mask / mask.sum(ts_levels[ts_levels.index(current_level) + 1 :]) + else: + raise ValueError( + f"Unknown operation: {operation}. Must be 'distribute' or 'broadcast'." 
+ ) + + # Perform the operation + result = ( + (data.unstack(dim="timeslice") * mask) + .stack(timeslice=ts_levels) + .dropna("timeslice") + ) + return sort_timeslices(result, ts) def drop_timeslice(data: DataArray) -> DataArray: @@ -607,4 +287,49 @@ def drop_timeslice(data: DataArray) -> DataArray: return data.drop_vars(data.timeslice.indexes) -setup_module(DEFAULT_TIMESLICE_DESCRIPTION) +def get_level(data: DataArray) -> str: + """Get the timeslice level of a DataArray.""" + if "timeslice" not in data.dims: + raise ValueError("Data does not have a 'timeslice' dimension.") + return data.timeslice.to_index().names[-1] + + +def sort_timeslices(data: DataArray, ts: Optional[DataArray] = None) -> DataArray: + """Sorts the timeslices of a DataArray according to a reference timeslice. + + This will only sort timeslices to match the reference if the data is at the same + timeslice level as the reference. Otherwise, it will sort timeslices in alphabetical + order. + + Args: + data: Timesliced DataArray to sort. + ts: Dataarray with reference timeslices in the appropriate order + """ + if ts is None: + ts = TIMESLICE + + # If data is at the same timeslice level as ts, sort timeslices according to ts + if get_level(data) == get_level(ts): + return data.sel(timeslice=ts.timeslice) + # Otherwise, sort timeslices in alphabetical order + return data.sortby("timeslice") + + +def timeslice_max(data: DataArray, ts: Optional[DataArray] = None) -> DataArray: + """Find the max value over the timeslice dimension, normalized for timeslice length. + + This first annualizes the value in each timeslice by dividing by the fraction of the + year that the timeslice occupies, then takes the maximum value + + Args: + data: Timesliced DataArray to find the max of. + ts: Dataarray with timeslice lengths. If None, defaults to the global timeslice. 
+ """ + if ts is None: + ts = TIMESLICE + + timeslice_level = get_level(data) + timeslice_fractions = compress_timeslice( + ts, ts=ts, level=timeslice_level, operation="sum" + ) / broadcast_timeslice(ts.sum(), ts=ts, level=timeslice_level) + return (data / timeslice_fractions).max("timeslice") diff --git a/src/muse/utilities.py b/src/muse/utilities.py index 7923687c8..9459196f9 100644 --- a/src/muse/utilities.py +++ b/src/muse/utilities.py @@ -1,12 +1,12 @@ """Collection of functions and stand-alone algorithms.""" +from __future__ import annotations + from collections.abc import Hashable, Iterable, Iterator, Mapping, Sequence from typing import ( Any, Callable, NamedTuple, - Optional, - Union, cast, ) @@ -14,9 +14,7 @@ import xarray as xr -def multiindex_to_coords( - data: Union[xr.Dataset, xr.DataArray], dimension: str = "asset" -): +def multiindex_to_coords(data: xr.Dataset | xr.DataArray, dimension: str = "asset"): """Flattens multi-index dimension into multi-coord dimension.""" from pandas import MultiIndex @@ -33,8 +31,8 @@ def multiindex_to_coords( def coords_to_multiindex( - data: Union[xr.Dataset, xr.DataArray], dimension: str = "asset" -) -> Union[xr.Dataset, xr.DataArray]: + data: xr.Dataset | xr.DataArray, dimension: str = "asset" +) -> xr.Dataset | xr.DataArray: """Creates a multi-index from flattened multiple coords.""" from pandas import MultiIndex @@ -47,11 +45,11 @@ def coords_to_multiindex( def reduce_assets( - assets: Union[xr.DataArray, xr.Dataset, Sequence[Union[xr.Dataset, xr.DataArray]]], - coords: Optional[Union[str, Sequence[str], Iterable[str]]] = None, + assets: xr.DataArray | xr.Dataset | Sequence[xr.Dataset | xr.DataArray], + coords: str | Sequence[str] | Iterable[str] | None = None, dim: str = "asset", - operation: Optional[Callable] = None, -) -> Union[xr.DataArray, xr.Dataset]: + operation: Callable | None = None, +) -> xr.DataArray | xr.Dataset: r"""Combine assets along given asset dimension. 
This method simplifies combining assets across multiple agents, or combining assets @@ -139,7 +137,9 @@ def reduce_assets( installed (asset) int32 12B 1990 1991 1990 Dimensions without coordinates: asset """ - from copy import copy + from copy import deepcopy + + assets = deepcopy(assets) if operation is None: @@ -148,23 +148,31 @@ def operation(x): assert operation is not None + # Concatenate assets if a sequence is given if not isinstance(assets, (xr.Dataset, xr.DataArray)): assets = xr.concat(assets, dim=dim) assert isinstance(assets, (xr.Dataset, xr.DataArray)) + + # If there are no assets, nothing needs to be done if assets[dim].size == 0: return assets + + # Coordinates to reduce over (e.g. technology, installed) if coords is None: coords = [cast(str, k) for k, v in assets.coords.items() if v.dims == (dim,)] elif isinstance(coords, str): coords = (coords,) coords = [k for k in coords if k in assets.coords and assets[k].dims == (dim,)] - assets = copy(assets) + + # Create a new dimension to group by dtypes = [(d, assets[d].dtype) for d in coords] grouper = np.array( list(zip(*(cast(Iterator, assets[d].values) for d in coords))), dtype=dtypes ) assert "grouper" not in assets.coords assets["grouper"] = "asset", grouper + + # Perform the operation result = operation(assets.groupby("grouper")).rename(grouper=dim) for i, d in enumerate(coords): result[d] = dim, [u[i] for u in result[dim].values] @@ -172,13 +180,13 @@ def operation(x): def broadcast_techs( - technologies: Union[xr.Dataset, xr.DataArray], - template: Union[xr.DataArray, xr.Dataset], + technologies: xr.Dataset | xr.DataArray, + template: xr.DataArray | xr.Dataset, dimension: str = "asset", interpolation: str = "linear", installed_as_year: bool = True, **kwargs, -) -> Union[xr.Dataset, xr.DataArray]: +) -> xr.Dataset | xr.DataArray: """Broadcasts technologies to the shape of template in given dimension. 
The dimensions of the technologies are fully explicit, in that each concept @@ -236,7 +244,7 @@ def broadcast_techs( return techs.sel(second_sel) -def clean_assets(assets: xr.Dataset, years: Union[int, Sequence[int]]): +def clean_assets(assets: xr.Dataset, years: int | Sequence[int]): """Cleans up and prepares asset for current iteration. - adds current and forecast year by backfilling missing entries @@ -255,11 +263,11 @@ def clean_assets(assets: xr.Dataset, years: Union[int, Sequence[int]]): def filter_input( - dataset: Union[xr.Dataset, xr.DataArray], - year: Optional[Union[int, Iterable[int]]] = None, + dataset: xr.Dataset | xr.DataArray, + year: int | Iterable[int] | None = None, interpolation: str = "linear", **kwargs, -) -> Union[xr.Dataset, xr.DataArray]: +) -> xr.Dataset | xr.DataArray: """Filter inputs, taking care to interpolate years.""" if year is None: setyear: set[int] = set() @@ -290,8 +298,8 @@ def filter_input( def filter_with_template( - data: Union[xr.Dataset, xr.DataArray], - template: Union[xr.DataArray, xr.Dataset], + data: xr.Dataset | xr.DataArray, + template: xr.DataArray | xr.Dataset, asset_dimension: str = "asset", **kwargs, ): @@ -340,7 +348,7 @@ def tupled_dimension(array: np.ndarray, axis: int): def lexical_comparison( objectives: xr.Dataset, binsize: xr.Dataset, - order: Optional[Sequence[Hashable]] = None, + order: Sequence[Hashable] | None = None, bin_last: bool = True, ) -> xr.DataArray: """Lexical comparison over the objectives. 
@@ -385,32 +393,22 @@ def merge_assets( dimension: str = "asset", ) -> xr.DataArray: """Merge two capacity arrays.""" + # Interpolate capacity arrays to a common time framework years = sorted(set(capa_a.year.values).union(capa_b.year.values)) - if len(capa_a.year) == 1: - result = xr.concat( - ( - capa_a, - capa_b.interp(year=years, method=interpolation).fillna(0), - ), - dim=dimension, - ).fillna(0) + capa_a_interp = capa_a + capa_b_interp = capa_b.interp(year=years, method=interpolation).fillna(0) elif len(capa_b.year) == 1: - result = xr.concat( - ( - capa_a.interp(year=years, method=interpolation).fillna(0), - capa_b, - ), - dim=dimension, - ).fillna(0) + capa_a_interp = capa_a.interp(year=years, method=interpolation).fillna(0) + capa_b_interp = capa_b else: - result = xr.concat( - ( - capa_a.interp(year=years, method=interpolation).fillna(0), - capa_b.interp(year=years, method=interpolation).fillna(0), - ), - dim=dimension, - ) + capa_a_interp = capa_a.interp(year=years, method=interpolation).fillna(0) + capa_b_interp = capa_b.interp(year=years, method=interpolation).fillna(0) + + # Concatenate the two capacity arrays + result = xr.concat((capa_a_interp, capa_b_interp), dim=dimension) + + # forgroup = result.pipe(coords_to_multiindex, dimension=dimension) if isinstance(forgroup, xr.DataArray): forgroup = forgroup.to_dataset() @@ -438,7 +436,7 @@ def avoid_repetitions(data: xr.DataArray, dim: str = "year") -> xr.DataArray: return data.year[years] -def nametuple_to_dict(nametup: Union[Mapping, NamedTuple]) -> Mapping: +def nametuple_to_dict(nametup: Mapping | NamedTuple) -> Mapping: """Transforms a nametuple of type GenericDict into an OrderDict.""" from collections import OrderedDict from dataclasses import asdict, is_dataclass @@ -537,11 +535,11 @@ def future_propagation( def agent_concatenation( - data: Mapping[Hashable, Union[xr.DataArray, xr.Dataset]], + data: Mapping[Hashable, xr.DataArray | xr.Dataset], dim: str = "asset", name: str = "agent", fill_value: 
Any = 0, -) -> Union[xr.DataArray, xr.Dataset]: +) -> xr.DataArray | xr.Dataset: """Concatenates input map along given dimension. Example: @@ -613,10 +611,10 @@ def agent_concatenation( def aggregate_technology_model( - data: Union[xr.DataArray, xr.Dataset], + data: xr.DataArray | xr.Dataset, dim: str = "asset", - drop: Union[str, Sequence[str]] = "installed", -) -> Union[xr.DataArray, xr.Dataset]: + drop: str | Sequence[str] = "installed", +) -> xr.DataArray | xr.Dataset: """Aggregate together assets with the same installation year. The assets of a given agent, region, and technology but different installation year @@ -659,3 +657,27 @@ def aggregate_technology_model( data, [cast(str, u) for u in data.coords if u not in drop and data[u].dims == (dim,)], ) + + +def check_dimensions( + data: xr.DataArray | xr.Dataset, + required: Iterable[str] = (), + optional: Iterable[str] = (), +): + """Ensure that an array has the required dimensions. + + This will check that all required dimensions are present, and that no other + dimensions are present, apart from those listed as optional. 
+ + Args: + data: DataArray or Dataset to check dimensions of + required: List of dimension names that must be present + optional: List of dimension names that may be present + """ + present = set(data.dims) + missing = set(required) - present + if missing: + raise ValueError(f"Missing required dimensions: {missing}") + extra = present - set(required) - set(optional) + if extra: + raise ValueError(f"Extra dimensions: {extra}") diff --git a/tests/conftest.py b/tests/conftest.py index 37c187102..bef2e1442 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,12 +1,14 @@ from collections.abc import Mapping, Sequence from pathlib import Path from typing import Callable, Optional +from unittest.mock import patch import numpy as np from pandas import DataFrame -from pytest import fixture, mark +from pytest import fixture from xarray import DataArray, Dataset +from muse.__main__ import patched_broadcast_compat_data from muse.agents import Agent @@ -19,41 +21,12 @@ def logger(): return logger -@fixture(scope="session") -def cases_directory() -> Optional[Path]: - try: - import muse_legacy - except ImportError: - return None - - return Path(muse_legacy.__file__).parent / "data" / "test" / "cases" - - -@fixture(scope="session") -def regression_directories(cases_directory) -> Mapping[str, Path]: - if cases_directory is None: - return {} - return { - directory.name: cases_directory / directory - for directory in cases_directory.iterdir() - if directory.is_dir() - and (directory / "input").is_dir() - and (directory / "output").is_dir() - } - - -@fixture() -def sectors_dir(tmpdir): - """Copies sectors directory to new dir. - - This gives some assurance the machinery for specifying sectors data actually works. 
- """ - from shutil import copytree - - from muse.defaults import DEFAULT_SECTORS_DIRECTORY - - copytree(DEFAULT_SECTORS_DIRECTORY, tmpdir.join("sectors_data_dir")) - return tmpdir.join("sectors_data_dir") +@fixture(autouse=True) +def patch_broadcast_compat_data(): + with patch( + "xarray.core.variable._broadcast_compat_data", patched_broadcast_compat_data + ): + yield def compare_df( @@ -102,7 +75,6 @@ def compare_dirs() -> Callable: def compare_dirs(actual_dir, expected_dir, **kwargs): """Compares all the csv files in a directory.""" from os import walk - from pathlib import Path from pandas import read_csv @@ -134,37 +106,46 @@ def compare_dirs(actual_dir, expected_dir, **kwargs): return compare_dirs -def pytest_collection_modifyitems(config, items): - try: - __import__("SGIModelData") - except ImportError: - skip_sgi_data = mark.skip(reason="Test requires private data") - for item in items: - if "sgidata" in item.keywords: - item.add_marker(skip_sgi_data) - try: - __import__("muse_legacy") - except ImportError: - skip_legacy = mark.skip(reason="Test requires legacy code") - for item in items: - if "legacy" in item.keywords: - item.add_marker(skip_legacy) - - -@fixture -def save_timeslice_globals(): - from muse import timeslices - - old = timeslices.TIMESLICE, timeslices.TRANSFORMS - yield - timeslices.TIMESLICE, timeslices.TRANSFORMS = old - - @fixture def default_timeslice_globals(): - from muse.timeslices import DEFAULT_TIMESLICE_DESCRIPTION, setup_module + from muse.timeslices import setup_module + + default_timeslices = """ + [timeslices] + winter.weekday.night = 396 + winter.weekday.morning = 396 + winter.weekday.afternoon = 264 + winter.weekday.early-peak = 66 + winter.weekday.late-peak = 66 + winter.weekday.evening = 396 + winter.weekend.night = 156 + winter.weekend.morning = 156 + winter.weekend.afternoon = 156 + winter.weekend.evening = 156 + spring-autumn.weekday.night = 792 + spring-autumn.weekday.morning = 792 + spring-autumn.weekday.afternoon = 528 
+ spring-autumn.weekday.early-peak = 132 + spring-autumn.weekday.late-peak = 132 + spring-autumn.weekday.evening = 792 + spring-autumn.weekend.night = 300 + spring-autumn.weekend.morning = 300 + spring-autumn.weekend.afternoon = 300 + spring-autumn.weekend.evening = 300 + summer.weekday.night = 396 + summer.weekday.morning = 396 + summer.weekday.afternoon = 264 + summer.weekday.early-peak = 66 + summer.weekday.late-peak = 66 + summer.weekday.evening = 396 + summer.weekend.night = 150 + summer.weekend.morning = 150 + summer.weekend.afternoon = 150 + summer.weekend.evening = 150 + level_names = ["month", "day", "hour"] + """ - setup_module(DEFAULT_TIMESLICE_DESCRIPTION) + setup_module(default_timeslices) @fixture @@ -174,22 +155,6 @@ def timeslice(default_timeslice_globals) -> Dataset: return TIMESLICE -@fixture -def other_timeslice() -> Dataset: - from pandas import MultiIndex - - months = ["winter", "spring-autumn", "summer"] - days = ["all-week", "all-week", "all-week"] - hour = ["all-day", "all-day", "all-day"] - coordinates = MultiIndex.from_arrays( - [months, days, hour], names=("month", "day", "hour") - ) - result = Dataset(coords={"timeslice": coordinates}) - result["represent_hours"] = ("timeslice", [2920, 2920, 2920]) - result = result.set_coords("represent_hours") - return result - - @fixture def coords() -> Mapping: """Technoeconomics coordinates.""" @@ -397,20 +362,10 @@ def newcapa_agent(agent_args, technologies, stock) -> Agent: @fixture def retro_agent(agent_args, technologies, stock) -> Agent: + agent_args["investment"] = "adhoc" # fails with scipy solver, see # 587 return create_agent(agent_args, technologies, stock.capacity, "retrofit") -@fixture -def objective(retro_agent, coords) -> DataArray: - from numpy.random import choice, rand - - asset = retro_agent.assets.technology.rename(technology="asset") - techs = [i for i in coords["technology"] if choice((True, False))] - data = rand(len(asset), len(techs)) - coords = {"asset": asset, "technology": 
techs} - return DataArray(data, coords=coords, dims=("asset", "technology")) - - @fixture def stock(coords, technologies) -> Dataset: return _stock(coords, technologies) @@ -471,21 +426,6 @@ def _stock( return result -@fixture -def assets(coords, technologies) -> Dataset: - """Stock with repeat technologies.""" - from xarray import concat - - return concat( - ( - _stock(coords, technologies), - _stock(coords, technologies), - _stock(coords, technologies), - ), - dim="technology", - ) - - @fixture def search_space(retro_agent, technologies): """Example search space, as would be computed by an agent.""" @@ -588,11 +528,8 @@ def drop_optionals(settings): def warnings_as_errors(request): from warnings import simplefilter - # disable fixture for some tests using legacy sectors. + # disable fixture for some tests if ( - request.module.__name__ == "test_legacy_sector" - and request.node.name.startswith("test_legacy_sector_regression[") - ) or ( request.module.__name__ == "test_outputs" and request.node.name == "test_save_with_fullpath_to_excel_with_sink" ): diff --git a/tests/test_agents.py b/tests/test_agents.py index e3c8c2b31..ea351d9b0 100644 --- a/tests/test_agents.py +++ b/tests/test_agents.py @@ -143,7 +143,7 @@ def test_run_retro_agent(retro_agent, technologies, agent_market, demand_share): technologies.max_capacity_addition[:] = retro_agent.assets.capacity.sum() * 100 technologies.max_capacity_growth[:] = retro_agent.assets.capacity.sum() * 100 - retro_agent.next(technologies, agent_market, demand_share) + retro_agent.next(technologies, agent_market, demand_share, time_period=5) def test_merge_assets(assets): diff --git a/tests/test_constraints.py b/tests/test_constraints.py index 9660bddb0..79fa1dd45 100644 --- a/tests/test_constraints.py +++ b/tests/test_constraints.py @@ -20,14 +20,6 @@ def residential(model): return examples.sector("residential", model=model) -@fixture(params=["timeslice_as_list", "timeslice_as_multindex"]) -def timeslices(market, request): - 
timeslice = market.timeslice - if request.param == "timeslice_as_multindex": - timeslice = _as_list(timeslice) - return timeslice - - @fixture def technologies(residential): return residential.technologies.squeeze("region") @@ -61,7 +53,6 @@ def lpcosts(technologies, market, costs): return lp_costs( technologies.interp(year=market.year.min() + 5).drop_vars("year"), costs=costs, - timeslices=market.timeslice, ) @@ -71,13 +62,12 @@ def assets(residential): @fixture -def market_demand(assets, technologies, market): +def market_demand(assets, technologies): from muse.quantities import maximum_production return 0.8 * maximum_production( technologies.interp(year=2025), assets.capacity.sel(year=2025).groupby("technology").sum("asset"), - timeslices=market.timeslice, ).rename(technology="asset") @@ -208,12 +198,12 @@ def test_lp_constraint(constraint, lpcosts): assert result.b.values == approx(0) -def test_to_scipy_adapter_maxprod(technologies, costs, max_production, timeslices): +def test_to_scipy_adapter_maxprod(technologies, costs, max_production): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) - adapter = ScipyAdapter.factory(technologies, costs, timeslices, max_production) + adapter = ScipyAdapter.factory(technologies, costs, max_production) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) assert adapter.A_eq is None @@ -224,7 +214,7 @@ def test_to_scipy_adapter_maxprod(technologies, costs, max_production, timeslice assert adapter.b_ub.size == adapter.A_ub.shape[0] assert adapter.c.size == adapter.A_ub.shape[1] - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) capsize = lpcosts.capacity.size prodsize = lpcosts.production.size assert adapter.c.size == capsize + prodsize @@ -233,12 +223,12 @@ def test_to_scipy_adapter_maxprod(technologies, costs, max_production, timeslice assert adapter.A_ub[:, capsize:] == 
approx(np.eye(prodsize)) -def test_to_scipy_adapter_demand(technologies, costs, demand_constraint, timeslices): +def test_to_scipy_adapter_demand(technologies, costs, demand_constraint): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) - adapter = ScipyAdapter.factory(technologies, costs, timeslices, demand_constraint) + adapter = ScipyAdapter.factory(technologies, costs, demand_constraint) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) assert adapter.A_ub is not None @@ -251,7 +241,7 @@ def test_to_scipy_adapter_demand(technologies, costs, demand_constraint, timesli assert adapter.b_ub.size == adapter.A_ub.shape[0] assert adapter.c.size == adapter.A_ub.shape[1] - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) capsize = lpcosts.capacity.size prodsize = lpcosts.production.size assert adapter.c.size == capsize + prodsize @@ -265,15 +255,13 @@ def test_to_scipy_adapter_demand(technologies, costs, demand_constraint, timesli def test_to_scipy_adapter_max_capacity_expansion( - technologies, costs, max_capacity_expansion, timeslices + technologies, costs, max_capacity_expansion ): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) - adapter = ScipyAdapter.factory( - technologies, costs, timeslices, max_capacity_expansion - ) + adapter = ScipyAdapter.factory(technologies, costs, max_capacity_expansion) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) assert adapter.A_ub is not None @@ -287,7 +275,7 @@ def test_to_scipy_adapter_max_capacity_expansion( assert adapter.c.size == adapter.A_ub.shape[1] assert adapter.c.ndim == 1 - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) capsize = lpcosts.capacity.size prodsize = lpcosts.production.size assert 
adapter.c.size == capsize + prodsize @@ -297,12 +285,12 @@ def test_to_scipy_adapter_max_capacity_expansion( assert set(adapter.A_ub[:, :capsize].flatten()) == {0.0, 1.0} -def test_to_scipy_adapter_no_constraint(technologies, costs, timeslices): +def test_to_scipy_adapter_no_constraint(technologies, costs): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) - adapter = ScipyAdapter.factory(technologies, costs, timeslices) + adapter = ScipyAdapter.factory(technologies, costs) assert set(adapter.kwargs) == {"c", "A_ub", "b_ub", "A_eq", "b_eq", "bounds"} assert adapter.bounds == (0, np.inf) assert adapter.A_ub is None @@ -311,18 +299,18 @@ def test_to_scipy_adapter_no_constraint(technologies, costs, timeslices): assert adapter.b_eq is None assert adapter.c.ndim == 1 - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) capsize = lpcosts.capacity.size prodsize = lpcosts.production.size assert adapter.c.size == capsize + prodsize -def test_back_to_muse_capacity(technologies, costs, timeslices): +def test_back_to_muse_capacity(technologies, costs): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) data = ScipyAdapter._unified_dataset(technologies, lpcosts) lpquantity = ScipyAdapter._selected_quantity(data, "capacity") assert set(lpquantity.dims) == {"d(asset)", "d(replacement)"} @@ -332,12 +320,12 @@ def test_back_to_muse_capacity(technologies, costs, timeslices): assert (copy == lpcosts.capacity).all() -def test_back_to_muse_production(technologies, costs, timeslices): +def test_back_to_muse_production(technologies, costs): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) data = 
ScipyAdapter._unified_dataset(technologies, lpcosts) lpquantity = ScipyAdapter._selected_quantity(data, "production") assert set(lpquantity.dims) == { @@ -352,11 +340,11 @@ def test_back_to_muse_production(technologies, costs, timeslices): assert (copy == lpcosts.production).all() -def test_back_to_muse_all(technologies, costs, timeslices, rng: np.random.Generator): +def test_back_to_muse_all(technologies, costs, rng: np.random.Generator): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) data = ScipyAdapter._unified_dataset(technologies, lpcosts) lpcapacity = ScipyAdapter._selected_quantity(data, "capacity") @@ -383,11 +371,11 @@ def test_back_to_muse_all(technologies, costs, timeslices, rng: np.random.Genera assert (copy.production == lpcosts.production).all() -def test_scipy_adapter_back_to_muse(technologies, costs, timeslices, rng): +def test_scipy_adapter_back_to_muse(technologies, costs, rng): from muse.constraints import ScipyAdapter, lp_costs technologies = technologies.interp(year=2025) - lpcosts = lp_costs(technologies, costs, timeslices) + lpcosts = lp_costs(technologies, costs) data = ScipyAdapter._unified_dataset(technologies, lpcosts) lpcapacity = ScipyAdapter._selected_quantity(data, "capacity") @@ -406,7 +394,7 @@ def test_scipy_adapter_back_to_muse(technologies, costs, timeslices, rng): ) ) - adapter = ScipyAdapter.factory(technologies, costs, timeslices) + adapter = ScipyAdapter.factory(technologies, costs) assert (adapter.to_muse(x).capacity == lpcosts.capacity).all() assert (adapter.to_muse(x).production == lpcosts.production).all() @@ -422,14 +410,12 @@ def _as_list(data: Union[xr.DataArray, xr.Dataset]) -> Union[xr.DataArray, xr.Da return data -def test_scipy_adapter_standard_constraints( - technologies, costs, constraints, timeslices -): +def test_scipy_adapter_standard_constraints(technologies, 
costs, constraints): from muse.constraints import ScipyAdapter technologies = technologies.interp(year=2025) - adapter = ScipyAdapter.factory(technologies, costs, timeslices, *constraints) + adapter = ScipyAdapter.factory(technologies, costs, *constraints) maxprod = next(cs for cs in constraints if cs.name == "max_production") maxcapa = next(cs for cs in constraints if cs.name == "max capacity expansion") demand = next(cs for cs in constraints if cs.name == "demand") diff --git a/tests/test_costs.py b/tests/test_costs.py index 66c89fa79..1099e2312 100644 --- a/tests/test_costs.py +++ b/tests/test_costs.py @@ -18,15 +18,13 @@ def _capacity(technologies, demand_share): @fixture -def _production(technologies, _capacity, demand_share): - from muse.timeslices import QuantityType, convert_timeslice +def _production(technologies, _capacity): + from muse.timeslices import broadcast_timeslice, distribute_timeslice production = ( - _capacity - * convert_timeslice( - technologies.fixed_outputs, demand_share.timeslice, QuantityType.EXTENSIVE - ) - * technologies.utilization_factor + broadcast_timeslice(_capacity) + * distribute_timeslice(technologies.fixed_outputs) + * broadcast_timeslice(technologies.utilization_factor) ) return production diff --git a/tests/test_demand_share.py b/tests/test_demand_share.py index 439d416e9..fc1b14602 100644 --- a/tests/test_demand_share.py +++ b/tests/test_demand_share.py @@ -8,20 +8,20 @@ def matching_market(technologies, stock, timeslice): """A market which matches stocks exactly.""" return ( - _matching_market(technologies, stock, timeslice) + _matching_market(technologies, stock) .interp(year=[2010, 2015, 2020, 2025]) .transpose("timeslice", "region", "commodity", "year") ) -def _matching_market(technologies, stock, timeslice): +def _matching_market(technologies, stock): """A market which matches stocks exactly.""" from numpy.random import random from muse.quantities import consumption, maximum_production market = xr.Dataset() - 
production = maximum_production(technologies, stock.capacity, timeslices=timeslice) + production = maximum_production(technologies, stock.capacity) market["supply"] = production.sum("asset") market["consumption"] = drop_timeslice( consumption(technologies, production).sum("asset") + market.supply @@ -120,17 +120,17 @@ def test_new_retro_split_zero_new_unmet(technologies, stock, matching_market): def test_new_retro_accounting_identity(technologies, stock, market): from muse.demand_share import new_and_retro_demands - from muse.production import factory + from muse.quantities import maximum_production share = new_and_retro_demands( stock.capacity, market, technologies, current_year=2010, forecast=5 ) assert (share >= 0).all() - production_method = factory() serviced = ( - production_method( - market.interp(year=2015), stock.capacity.interp(year=2015), technologies + maximum_production( + capacity=stock.capacity.interp(year=2015), + technologies=technologies, ) .groupby("region") .sum("asset") @@ -158,7 +158,6 @@ def method(capacity): return decommissioning_demand( technologies.sel(region="USA"), capacity, - matching_market.timeslice, year=[2012, 2017], ) @@ -195,7 +194,6 @@ def method(capacity): return 0 * decommissioning_demand( technologies.sel(region="USA"), capacity, - matching_market.timeslice, year=[2012, 2017], ) @@ -233,8 +231,8 @@ def test_new_retro_demand_share(technologies, coords, market, timeslice, stock_f asia_stock = stock_factory(coords, technologies).expand_dims(region=["ASEAN"]) usa_stock = stock_factory(coords, technologies).expand_dims(region=["USA"]) - asia_market = _matching_market(technologies, asia_stock, timeslice) - usa_market = _matching_market(technologies, usa_stock, timeslice) + asia_market = _matching_market(technologies, asia_stock) + usa_market = _matching_market(technologies, usa_stock) market = xr.concat((asia_market, usa_market), dim="region") market.consumption.loc[{"year": 2031}] *= 2 @@ -287,8 +285,8 @@ def 
test_standard_demand_share(technologies, coords, market, timeslice, stock_fa asia_stock = stock_factory(coords, technologies).expand_dims(region=["ASEAN"]) usa_stock = stock_factory(coords, technologies).expand_dims(region=["USA"]) - asia_market = _matching_market(technologies, asia_stock, timeslice) - usa_market = _matching_market(technologies, usa_stock, timeslice) + asia_market = _matching_market(technologies, asia_stock) + usa_market = _matching_market(technologies, usa_stock) market = xr.concat((asia_market, usa_market), dim="region") market.consumption.loc[{"year": 2031}] *= 2 @@ -340,9 +338,10 @@ def test_unmet_forecast_demand(technologies, coords, timeslice, stock_factory): asia_stock = stock_factory(coords, technologies).expand_dims(region=["ASEAN"]) usa_stock = stock_factory(coords, technologies).expand_dims(region=["USA"]) - asia_market = _matching_market(technologies, asia_stock, timeslice) - usa_market = _matching_market(technologies, usa_stock, timeslice) + asia_market = _matching_market(technologies, asia_stock) + usa_market = _matching_market(technologies, usa_stock) market = xr.concat((asia_market, usa_market), dim="region") + current_year = market.year[0] # spoof some agents @dataclass @@ -355,7 +354,9 @@ class Agent: Agent(0.7 * usa_stock.squeeze("region")), Agent(asia_stock.squeeze("region")), ] - result = unmet_forecasted_demand(agents, market, technologies) + result = unmet_forecasted_demand( + agents, market, technologies, current_year=current_year, forecast=5 + ) assert set(result.dims) == set(market.consumption.dims) - {"year"} assert result.values == approx(0) @@ -365,7 +366,9 @@ class Agent: Agent(0.8 * usa_stock.squeeze("region")), Agent(1.1 * asia_stock.squeeze("region")), ] - result = unmet_forecasted_demand(agents, market, technologies) + result = unmet_forecasted_demand( + agents, market, technologies, current_year=current_year, forecast=5 + ) assert set(result.dims) == set(market.consumption.dims) - {"year"} assert result.values == 
approx(0) @@ -374,7 +377,9 @@ class Agent: Agent(0.5 * usa_stock.squeeze("region")), Agent(0.5 * asia_stock.squeeze("region")), ] - result = unmet_forecasted_demand(agents, market, technologies) + result = unmet_forecasted_demand( + agents, market, technologies, current_year=current_year, forecast=5 + ) comm_usage = technologies.comm_usage.sel(commodity=market.commodity) enduse = is_enduse(comm_usage) assert (result.commodity == comm_usage.commodity).all() diff --git a/tests/test_fullsim_regression.py b/tests/test_fullsim_regression.py index 4fe089b83..43609bcf3 100644 --- a/tests/test_fullsim_regression.py +++ b/tests/test_fullsim_regression.py @@ -5,7 +5,6 @@ from muse.examples import available_examples -@mark.usefixtures("save_timeslice_globals") @mark.regression @mark.example @mark.parametrize("model", available_examples()) @@ -40,7 +39,6 @@ def available_tutorials(): return [d.parent for d in base_path.rglob("*/input") if d.is_dir()] -@mark.usefixtures("save_timeslice_globals") @mark.regression @mark.tutorial @mark.parametrize("tutorial_path", available_tutorials()) diff --git a/tests/test_investments.py b/tests/test_investments.py index 92b9abbbc..d22909a8e 100644 --- a/tests/test_investments.py +++ b/tests/test_investments.py @@ -1,33 +1,4 @@ -from pytest import fixture, mark - - -@fixture -def capacity_expansion(): - from numpy import arange - from numpy.random import rand - from xarray import Dataset - - from muse.investments import CapacityAddition - - data = Dataset() - data["asset"] = "asset", arange(5, 10) - data["replacement"] = "replacement", arange(0, 6) - data["ranks"] = data.asset + data.replacement // 2 - data["ranks"] = data.ranks.rank("replacement").astype(int) - data["deltas"] = ( - ("asset", "replacement"), - rand(data.asset.size, data.replacement.size), - ) - data["deltas"] *= rand(*data.deltas.shape) > 0.25 - - return CapacityAddition(data.ranks, data.deltas) - - -def add_var(coordinates, *dims, factor=100.0): - from numpy.random import 
rand - - shape = tuple(len(coordinates[u]) for u in dims) - return dims, (rand(*shape) * factor).astype(type(factor)) +from pytest import mark def test_cliff_retirement_known_profile(): @@ -44,7 +15,7 @@ def test_cliff_retirement_known_profile(): name="technical_life", ) - profile = cliff_retirement_profile(lifetime) + profile = cliff_retirement_profile(technical_life=lifetime, investment_year=2020) expected = array( [ [True, False, False, False], @@ -73,12 +44,12 @@ def test_cliff_retirement_random_profile(protected): ) effective_lifetime = (protected // lifetime + 1) * lifetime - current = 5 + investment_year = 2020 profile = cliff_retirement_profile( - lifetime, current_year=current, protected=protected + technical_life=lifetime.clip(min=protected), investment_year=investment_year ) - assert profile.year.min() == current - assert profile.year.max() <= current + effective_lifetime.max() + 1 - assert profile.astype(int).interp(year=current).all() - assert profile.astype(int).interp(year=current + protected).all() + assert profile.year.min() == investment_year + assert profile.year.max() <= investment_year + effective_lifetime.max() + 1 + assert profile.astype(int).interp(year=investment_year).all() + assert profile.astype(int).interp(year=investment_year + protected - 1).all() assert not profile.astype(int).interp(year=profile.year.max()).any() diff --git a/tests/test_legacy_sector.py b/tests/test_legacy_sector.py deleted file mode 100644 index 6b9aaca43..000000000 --- a/tests/test_legacy_sector.py +++ /dev/null @@ -1,183 +0,0 @@ -from pathlib import Path -from typing import Optional - -from pytest import approx, mark - - -def legacy_inputs(): - try: - import muse_legacy - except ImportError: - return [] - - from muse_legacy.sectors import SECTORS - - excluded = { - "Bioenergy", - "Commercial", - "Industry", - "NET", - "Refinery", - "Residential", - "IndustryABM", - "Sequestration", - "TradeSupply", - "TradeRefinery", - "TradePower", - "Transport", - "Shipping", - 
"Supply", - "Power", - } - - return [ - ( - sector, - Path(muse_legacy.__file__).parent - / "data" - / "test" - / "cases" - / sector - / f"settings_legacy_{sector.lower()}.toml", - ) - for sector in set(SECTORS) - excluded - ] - - -def legacy_input_file(sector: str) -> Optional[Path]: - """Gets the legacy sector settings file.""" - input_file = ( - Path(__file__).parent - / "data" - / "cases" - / sector - / f"settings_legacy_{sector.lower()}.toml" - ) - - return input_file - - -def update_settings(settings, sec_dir, out_dir): - """Updates a settings namedtuple with temporal sectors and output directories.""" - sectors = settings.sectors - - for s in sectors.list: - path = Path(sec_dir) / s - sector = getattr(sectors, s)._replace( - userdata_path=path, technodata_path=path, output_path=out_dir - ) - sectors = sectors._replace(**{s: sector}) - - return settings._replace(sectors=sectors) - - -@mark.legacy -@mark.sgidata -@mark.parametrize("sector,filepath", legacy_inputs()) -def test_legacy_sector_creation(sector, filepath): - """Test the creation of the legacy sectors.""" - from muse.readers import read_settings - from muse.sectors import SECTORS_REGISTERED - - settings = read_settings(filepath) - - SECTORS_REGISTERED["legacy"](name=sector, settings=settings) - - -def test_xarray_to_array(market): - import numpy as np - - from muse.sectors.legacy_sector import xarray_to_ndarray - from muse.timeslices import QuantityType - - dims = ("commodity", "region", "year", "timeslice") - arr = xarray_to_ndarray( - years=market.year, - xdata=market.supply, - ts=market.timeslice, - qt=QuantityType.EXTENSIVE, - global_commodities=market.commodity, - dims=dims, - regions=np.array(market.region), - ) - - assert arr == approx(market.supply.transpose(*dims).values) - - -def test_array_to_xarray(market): - from numpy import array - from xarray import broadcast - - from muse.sectors.legacy_sector import ndarray_to_xarray - from muse.timeslices import QuantityType - - dims = 
("commodity", "region", "year", "timeslice") - arr = market.supply.transpose(*dims).values - result = ndarray_to_xarray( - years=market.year, - data=arr, - ts=market.timeslice, - qt=QuantityType.EXTENSIVE, - global_commodities=market.commodity, - sector_commodities=market.commodity, - data_ts=market.timeslice, - dims=dims, - regions=array(market.region), - ) - - expected, actual = broadcast(market.supply, result) - assert actual.values == approx(expected.values) - - -def test_round_trip(market): - from numpy import array - from xarray import broadcast - - from muse.sectors.legacy_sector import ndarray_to_xarray, xarray_to_ndarray - from muse.timeslices import QuantityType - - dims = ("commodity", "region", "year", "timeslice") - - arr = xarray_to_ndarray( - years=market.year, - xdata=market.supply, - ts=market.timeslice, - qt=QuantityType.EXTENSIVE, - global_commodities=market.commodity, - dims=dims, - regions=array(market.region), - ) - - result = ndarray_to_xarray( - years=market.year, - data=arr, - ts=market.timeslice, - qt=QuantityType.EXTENSIVE, - global_commodities=market.commodity, - sector_commodities=market.commodity, - data_ts=market.timeslice, - dims=dims, - regions=array(market.region), - ) - - expected, actual = broadcast(market.supply, result) - assert actual.values == approx(expected.values) - - -@mark.legacy -@mark.sgidata -@mark.regression -@mark.parametrize("sector,filepath", legacy_inputs()) -def test_legacy_sector_regression(sector, filepath, sectors_dir, tmpdir, compare_dirs): - """Test the execution of the next method in the legacy sectors for 1 year.""" - from muse.mca import MCA - from muse.readers import read_settings - - settings = read_settings(filepath) - settings = update_settings(settings, sectors_dir, tmpdir) - - mca = MCA.factory(settings) - mca.run() - - regression_dir = filepath.parent - compare_dirs(tmpdir, regression_dir / "output") diff --git a/tests/test_objectives.py b/tests/test_objectives.py index a8030634d..593877f4d 100644 
--- a/tests/test_objectives.py +++ b/tests/test_objectives.py @@ -60,11 +60,15 @@ def test_computing_objectives(_technologies, _demand, _prices): from muse.objectives import factory, register_objective @register_objective - def first(technologies, switch=True, *args, **kwargs): - from xarray import full_like + def first(technologies, demand, switch=True, *args, **kwargs): + from xarray import broadcast, full_like value = 1 if switch else 2 - result = full_like(technologies["replacement"], value, dtype=float) + result = full_like( + broadcast(technologies["replacement"], demand["asset"])[0], + value, + dtype=float, + ) return result @register_objective @@ -104,20 +108,20 @@ def second(technologies, demand, assets=None, *args, **kwargs): assert (objectives.second.isel(asset=1) == 5).all() -def test_comfort(_technologies): +def test_comfort(_technologies, _demand): from muse.objectives import comfort _technologies["comfort"] = add_var(_technologies, "replacement") - result = comfort(_technologies) - assert set(result.dims) == {"replacement"} + result = comfort(_technologies, _demand) + assert set(result.dims) == {"replacement", "asset"} -def test_efficiency(_technologies): +def test_efficiency(_technologies, _demand): from muse.objectives import efficiency _technologies["efficiency"] = add_var(_technologies, "replacement") - result = efficiency(_technologies) - assert set(result.dims) == {"replacement"} + result = efficiency(_technologies, _demand) + assert set(result.dims) == {"replacement", "asset"} def test_capacity_to_service_demand(_technologies, _demand): @@ -148,12 +152,12 @@ def test_fixed_costs(_technologies, _demand): assert set(result.dims) == {"replacement", "asset"} -def test_capital_costs(_technologies): +def test_capital_costs(_technologies, _demand): from muse.objectives import capital_costs _technologies["scaling_size"] = add_var(_technologies, "replacement") - result = capital_costs(_technologies) - assert set(result.dims) == {"replacement"} + result 
= capital_costs(_technologies, _demand) + assert set(result.dims) == {"replacement", "asset"} def test_emission_cost(_technologies, _demand, _prices): @@ -174,7 +178,7 @@ def test_annual_levelized_cost_of_energy(_technologies, _demand, _prices): from muse.objectives import annual_levelized_cost_of_energy result = annual_levelized_cost_of_energy(_technologies, _demand, _prices) - assert set(result.dims) == {"replacement"} + assert set(result.dims) == {"replacement", "asset"} def test_lifetime_levelized_cost_of_energy(_technologies, _demand, _prices): diff --git a/tests/test_outputs.py b/tests/test_outputs.py index bedcf4d22..db011e150 100644 --- a/tests/test_outputs.py +++ b/tests/test_outputs.py @@ -28,33 +28,6 @@ def streetcred(*args, **kwargs): ) -@fixture -def limits_path(tmp_path): - from textwrap import dedent - - path = tmp_path / "limits.csv" - path.write_text( - dedent( - """ - Year,Month,Day,Hour,Region,Gas - 2020,all-year,all-week,night,R1,5 - 2020,all-year,all-week,morning,R1,5 - 2020,all-year,all-week,afternoon,R1,5 - 2020,all-year,all-week,early-peak,R1,5 - 2020,all-year,all-week,late-peak,R1,5 - 2020,all-year,all-week,evening,R1,5 - 2050,all-year,all-week,night,R1,8 - 2050,all-year,all-week,morning,R1,8 - 2050,all-year,all-week,afternoon,R1,8 - 2050,all-year,all-week,early-peak,R1,8 - 2050,all-year,all-week,late-peak,R1,8 - 2050,all-year,all-week,evening,R1,8 - """ - ) - ) - return path - - @mark.usefixtures("streetcred") def test_save_with_dir(tmpdir): from pandas import read_csv @@ -333,64 +306,6 @@ def dummy(market, **kwargs): ) -def test_aggregate_resources(market): - from muse.outputs.mca import AggregateResources - - commodity = str(market.commodity.isel(commodity=0).values) - output = AggregateResources(commodity) - a = output(market, []).copy() - assert ( - a == market.consumption.sel(year=2010, commodity=commodity, drop=True) - ).all() - b = output(market, []).copy() - assert (b == 2 * a).all() - - -def 
test_finite_resources_quantity(limits_path): - from muse import examples - from muse.outputs.mca import FiniteResources - - market = examples.mca_market()[["consumption"]] - - output = FiniteResources(limits_path=limits_path, commodities="gas") - result = output(market, []) - assert set(result.dims) == {"region", "timeslice", "commodity"} - assert result.all() - - market.consumption.loc[dict(commodity="gas")] = 3.0 - result = output(market, []) - assert result.all() - - result = output(market, []) - assert not result.all() - - -def test_finite_resources_in_sim(tmp_path, limits_path): - from toml import dump, load - - from muse import examples - from muse.mca import MCA - from muse.outputs.sinks import FiniteResourceException - from muse.readers.toml import read_settings - - examples.copy_model("default", path=tmp_path) - toml = load(tmp_path / "model" / "settings.toml") - toml["outputs"].append( - dict( - quantity="finite_resources", - limits_path=str(limits_path.resolve()), - early_exit=True, - commodities="gas", - ) - ) - with open(tmp_path / "model" / "settings.toml", "w") as fileobj: - dump(toml, fileobj) - - mca = MCA.factory(read_settings(tmp_path / "model" / "settings.toml")) - with raises(FiniteResourceException): - mca.run() - - def test_register_output_quantity_cache(): from muse.outputs.cache import OUTPUT_QUANTITIES, register_cached_quantity diff --git a/tests/test_presets.py b/tests/test_presets.py deleted file mode 100644 index 70be1b9bd..000000000 --- a/tests/test_presets.py +++ /dev/null @@ -1,141 +0,0 @@ -from pytest import fixture, mark - -pytestmark = mark.usefixtures("default_timeslice_globals") - - -@fixture -def commercial_path(): - from pathlib import Path - - import muse_legacy - - return ( - Path(muse_legacy.__file__).parent / "data" / "test" / "presets" / "commercial" - ) - - -@mark.legacy -def test_presets_fails_on_missing_data(commercial_path): - from pytest import raises - - from muse.readers.toml import convert - from muse.sectors 
import PresetSector - - nested = convert(dict(sectors=dict(preset=dict()))) - with raises(IOError): - PresetSector.factory("preset", nested) - - -@mark.legacy -def test_presets_from_single(commercial_path): - from muse.readers.toml import convert - from muse.sectors import PresetSector - - settings = dict(consumption_path=str(commercial_path / "*Consumption.csv")) - nested = convert(dict(sectors=dict(preset=settings))) - presets = PresetSector.factory("preset", nested) - assert (presets.presets.supply.values == 0).all() - assert (presets.presets.costs.values == 0).all() - - -@mark.legacy -def test_presets_from_outputs(commercial_path): - from xarray import Dataset - - from muse.readers.toml import convert - from muse.sectors import PresetSector - - settings = dict( - consumption_path=str(commercial_path / "*Consumption.csv"), - supply_path=str(commercial_path / "*[0-9]Supply.csv"), - lcoe_path=str(commercial_path / "*LCOE.csv"), - ) - nested = convert(dict(sectors=dict(preset=settings))) - presets = PresetSector.factory("preset", nested) - market = Dataset(coords=presets.presets.coords).sel(year=[2010, 2015]) - assert presets.next(market) == presets.presets.sel(year=market.year) - assert "comm_usage" in presets.next(market).coords - assert "supply" in presets.next(market) - assert "consumption" in presets.next(market) - assert "costs" in presets.next(market) - - -@mark.sgidata -@mark.legacy -def test_presets_from_regression(sectors_dir, commercial_path): - from xarray import Dataset - - from muse.readers.toml import convert - from muse.sectors import PresetSector - - settings = dict( - macrodrivers_path=sectors_dir.join("Macrodrivers.csv"), - regression_path=sectors_dir.join("Residential", "regressionparameters.csv"), - ) - nested = convert(dict(sectors=dict(residential=settings))) - presets = PresetSector.factory("residential", nested) - market = Dataset(coords=presets.presets.coords).sel(year=[2010, 2015]) - assert "forecast" not in presets.presets.dims - assert 
presets.next(market) == presets.presets.sel(year=market.year) - assert (presets.presets.supply.values == 0).all() - assert (presets.presets.costs.values == 0).all() - assert "consumption" in presets.next(market) - - -@mark.sgidata -@mark.legacy -def test_presets_from_regression_with_forecast(sectors_dir, commercial_path): - from xarray import Dataset - - from muse.readers.toml import convert - from muse.sectors import PresetSector - - settings = dict( - macrodrivers_path=sectors_dir.join("Macrodrivers.csv"), - regression_path=sectors_dir.join("Residential", "regressionparameters.csv"), - forecast=[0, 5, 10], - ) - nested = convert(dict(sectors=dict(residential=settings))) - presets = PresetSector.factory("residential", nested) - market = Dataset(coords=presets.presets.coords).sel(year=[2010, 2015]) - assert "forecast" in presets.presets.dims - assert (presets.presets.supply.values == 0).all() - assert (presets.presets.costs.values == 0).all() - assert "consumption" in presets.next(market) - assert "forecast" not in presets.next(market).dims - assert "year" in presets.next(market).dims - assert all(presets.next(market).year == market.year) - - -@mark.legacy -def test_presets_from_projection(commercial_path): - from xarray import Dataset - - from muse.readers.toml import convert - from muse.sectors import PresetSector - - settings = dict(costs_path=str(commercial_path / "prices.csv")) - nested = convert(dict(sectors=dict(residential=settings))) - presets = PresetSector.factory("residential", nested) - market = Dataset(coords=presets.presets.coords).sel(year=[2010, 2015]) - assert presets.next(market) == presets.presets.sel(year=market.year) - assert (presets.presets.supply.values == 0).all() - assert (presets.presets.consumption.values == 0).all() - assert "costs" in presets.next(market) - - -@mark.sgidata -@mark.legacy -def test_presets_from_demand(sectors_dir, commercial_path): - from muse.readers.toml import convert - from muse.sectors import PresetSector - - 
settings = dict( - commodities_path=str(commercial_path / "commodities.csv"), - demand_path=str(sectors_dir.join("Demand.csv")), - ) - nested = convert(dict(sectors=dict(residential=settings))) - presets = PresetSector.factory("residential", nested) - assert (presets.presets.supply.values == 0).all() - assert (presets.presets.costs.values == 0).all() - assert "consumption" in presets.presets diff --git a/tests/test_quantities.py b/tests/test_quantities.py index b934c3736..9f89f16b4 100644 --- a/tests/test_quantities.py +++ b/tests/test_quantities.py @@ -4,26 +4,6 @@ import xarray as xr from pytest import approx, fixture -from muse.timeslices import drop_timeslice - - -@fixture -def demand( - technologies: xr.Dataset, capacity: xr.DataArray, market: xr.DataArray -) -> xr.DataArray: - from collections.abc import Hashable, Mapping - from typing import Any - - region = xr.DataArray(list(set(capacity.region.values)), dims="region") - coords: Mapping[Hashable, Any] = { - "commodity": technologies.commodity, - "year": capacity.year, - "region": region, - "timeslice": market.timeslice, - } - data = np.random.randint(0, 5, tuple(len(u) for u in coords.values())) - return xr.DataArray(data, coords=coords, dims=tuple(coords.keys())) - @fixture def production( @@ -31,40 +11,30 @@ def production( ) -> xr.DataArray: from numpy.random import random - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import broadcast_timeslice, distribute_timeslice comms = xr.DataArray( random(len(technologies.commodity)), coords={"commodity": technologies.commodity}, dims="commodity", ) - return capacity * convert_timeslice(comms, timeslice, QuantityType.EXTENSIVE) - - -def make_array(array): - data = np.random.randint(1, 5, len(array)) - return xr.DataArray(data, dims=array.dims, coords=array.coords) + return broadcast_timeslice(capacity) * distribute_timeslice(comms) def test_supply_enduse(technologies, capacity, timeslice): """End-use part of supply.""" from 
muse.commodities import is_enduse from muse.quantities import maximum_production, supply - from muse.timeslices import QuantityType, convert_timeslice - production = maximum_production(technologies, capacity, timeslice) - demand = convert_timeslice( - production.sum("asset") + 1, timeslice, QuantityType.EXTENSIVE - ) + production = maximum_production(technologies, capacity) + demand = production.sum("asset") + 1 spl = supply(capacity, demand, technologies).where( is_enduse(technologies.comm_usage), 0 ) assert (abs(spl - production) < 1e-12).all() assert (spl.sum("asset") < demand).all() - demand = convert_timeslice( - production.sum("asset") * 0.7, timeslice, QuantityType.EXTENSIVE - ) + demand = production.sum("asset") * 0.7 spl = supply(capacity, demand, technologies).where( is_enduse(technologies.comm_usage), 0 ) @@ -79,7 +49,7 @@ def test_supply_emissions(technologies, capacity, timeslice): from muse.commodities import is_enduse, is_pollutant from muse.quantities import emission, maximum_production, supply - production = maximum_production(technologies, capacity, timeslices=timeslice) + production = maximum_production(technologies, capacity) spl = supply(capacity, production.sum("asset") + 1, technologies) msn = emission(spl.where(is_enduse(spl.comm_usage), 0), technologies.fixed_outputs) actual, expected = xr.broadcast( @@ -139,7 +109,7 @@ def test_decommissioning_demand(technologies, capacity, timeslice): capacity.loc[{"year": 2015}] = forecast = 1.0 technologies.fixed_outputs[:] = fouts = 0.5 technologies.utilization_factor[:] = ufac = 0.4 - decom = decommissioning_demand(technologies, capacity, timeslice, years) + decom = decommissioning_demand(technologies, capacity, years) assert set(decom.dims) == {"asset", "commodity", "year", "timeslice"} assert decom.sel(commodity=is_enduse(technologies.comm_usage)).sum( "timeslice" @@ -149,6 +119,7 @@ def test_decommissioning_demand(technologies, capacity, timeslice): def test_consumption_no_flex(technologies, 
production, market): from muse.commodities import is_enduse, is_fuel from muse.quantities import consumption + from muse.timeslices import broadcast_timeslice fins = ( technologies.fixed_inputs.where(is_fuel(technologies.comm_usage), 0) @@ -161,7 +132,7 @@ def test_consumption_no_flex(technologies, production, market): ) services = technologies.commodity.sel(commodity=is_enduse(technologies.comm_usage)) expected = ( - (production.rename(commodity="comm_in") * fins) + (production.rename(commodity="comm_in") * broadcast_timeslice(fins)) .sel(comm_in=production.commodity.isin(services).rename(commodity="comm_in")) .sum("comm_in") ) @@ -180,7 +151,7 @@ def test_consumption_with_flex(technologies, production, market, timeslice): from muse.commodities import is_enduse, is_fuel from muse.quantities import consumption - from muse.timeslices import QuantityType, convert_timeslice + from muse.timeslices import broadcast_timeslice, distribute_timeslice techs = technologies.copy() techs.fixed_inputs[:] = 0 @@ -206,12 +177,14 @@ def one_dim(dimension): timeslice = one_dim(market.timeslice) commodity = one_dim(market.commodity) - prices = timeslice + commodity + year * region + prices = ( + timeslice + + broadcast_timeslice(commodity) + + broadcast_timeslice(year) * broadcast_timeslice(region) + ) assert set(prices.dims) == set(market.prices.dims) noenduse = ~is_enduse(techs.comm_usage) - production = convert_timeslice( - asset * year + commodity, timeslice, QuantityType.EXTENSIVE - ) + production = distribute_timeslice(asset * year + commodity) production.loc[{"commodity": noenduse}] = 0 actual = consumption(technologies, production, prices) @@ -249,7 +222,7 @@ def one_dim(dimension): def test_production_aggregate_asset_view( - capacity: xr.DataArray, technologies: xr.Dataset, timeslice: xr.DataArray + capacity: xr.DataArray, technologies: xr.Dataset ): """Production when capacity has format of agent.sector. 
@@ -267,7 +240,7 @@ def test_production_aggregate_asset_view( technologies.fixed_outputs[:] = 1 technologies.utilization_factor[:] = 1 - prod = maximum_production(technologies, capacity, timeslices=timeslice) + prod = maximum_production(technologies, capacity) assert set(prod.dims) == set(capacity.dims).union({"commodity", "timeslice"}) assert prod.sel(commodity=~enduses).values == approx(0) prod, expected = xr.broadcast( @@ -277,7 +250,7 @@ def test_production_aggregate_asset_view( technologies.fixed_outputs[:] = fouts = 2 technologies.utilization_factor[:] = ufact = 0.5 - prod = maximum_production(technologies, capacity, timeslices=timeslice) + prod = maximum_production(technologies, capacity) assert prod.sel(commodity=~enduses).values == approx(0) assert set(prod.dims) == set(capacity.dims).union({"commodity", "timeslice"}) prod, expected = xr.broadcast( @@ -287,7 +260,7 @@ def test_production_aggregate_asset_view( technologies.fixed_outputs[:] = fouts = 3 technologies.utilization_factor[:] = ufact = 0.5 - prod = maximum_production(technologies, capacity, timeslices=timeslice) + prod = maximum_production(technologies, capacity) assert prod.sel(commodity=~enduses).values == approx(0) assert set(prod.dims) == set(capacity.dims).union({"commodity", "timeslice"}) prod, expected = xr.broadcast( @@ -303,7 +276,7 @@ def test_production_agent_asset_view( from muse.utilities import coords_to_multiindex, reduce_assets capacity = coords_to_multiindex(reduce_assets(capacity)).unstack("asset").fillna(0) - test_production_aggregate_asset_view(capacity, technologies, timeslice) + test_production_aggregate_asset_view(capacity, technologies) def test_capacity_in_use(production: xr.DataArray, technologies: xr.Dataset): @@ -398,221 +371,21 @@ def test_emission(production: xr.DataArray, technologies: xr.Dataset): assert em.values == approx(fout * enduses.sum().values * prod) -def test_demand_matched_production( - demand: xr.DataArray, capacity: xr.DataArray, technologies: 
xr.Dataset -): - from muse.commodities import CommodityUsage, is_enduse - from muse.quantities import demand_matched_production, maximum_production - - # try and make sure we have a few more outputs than the default fixture - technologies.comm_usage[:] = np.random.choice( - [CommodityUsage.PRODUCT] * 3 + list(set(technologies.comm_usage.values)), - technologies.comm_usage.shape, - ) - technologies.fixed_outputs[:] = np.random.random(technologies.fixed_outputs.shape) - technologies.fixed_outputs[:] *= is_enduse(technologies.comm_usage) - - capacity = capacity.sel(year=capacity.year.min(), drop=True) - max_prod = maximum_production(technologies, capacity, timeslices=demand.timeslice) - demand = max_prod.sum("asset") - demand[:] *= np.random.choice([0, 1, 1 / 2, 1 / 3, 1 / 10], demand.shape) - prices = xr.zeros_like(demand) - prices[:] = np.random.randint(1, 10, prices.shape) - production = demand_matched_production(demand, prices, capacity, technologies) - assert set(production.dims) == set(max_prod.dims).union(prices.dims, capacity.dims) - assert (production <= max_prod + 1e-8).all() - - -def test_costed_production_exact_match(market, capacity, technologies): - from muse.costs import annual_levelized_cost_of_energy - from muse.quantities import ( - costed_production, - maximum_production, - ) - from muse.utilities import broadcast_techs - - if set(capacity.region.values) != set(market.region.values): - capacity.region.values[: len(set(market.region.values))] = list( - set(market.region.values) - ) - technodata = broadcast_techs(technologies, capacity) - costs = annual_levelized_cost_of_energy( - prices=market.prices.sel(region=technodata.region), technologies=technodata - ) - maxdemand = ( - xr.Dataset( - dict( - mp=maximum_production( - technologies, capacity, timeslices=market.timeslice - ) - ) - ) - .groupby("region") - .sum("asset") - .mp - ) - market["consumption"] = drop_timeslice(maxdemand) - result = costed_production(market.consumption, costs, capacity, 
technologies) - assert isinstance(result, xr.DataArray) - actual = xr.Dataset(dict(r=result)).groupby("region").sum("asset").r - assert set(actual.dims) == set(maxdemand.dims) - for dim in actual.dims: - assert (actual[dim] == maxdemand[dim]).all() - assert np.abs(actual - maxdemand).max() < 1e-8 - - -def test_costed_production_single_region(market, capacity, technologies): - from muse.costs import annual_levelized_cost_of_energy - from muse.quantities import ( - costed_production, - maximum_production, - ) - from muse.utilities import broadcast_techs - - capacity = capacity.drop_vars("region") - capacity["region"] = "USA" - market = market.sel(region=[capacity.region.values]) - maxdemand = maximum_production( - technologies, capacity, timeslices=market.timeslice - ).sum("asset") - market["consumption"] = drop_timeslice(0.9 * maxdemand) - technodata = broadcast_techs(technologies, capacity) - costs = annual_levelized_cost_of_energy( - prices=market.prices.sel(region=technodata.region), technologies=technodata - ) - result = costed_production(market.consumption, costs, capacity, technologies) - assert isinstance(result, xr.DataArray) - actual = result.sum("asset") - assert set(actual.dims) == set(maxdemand.dims) - for dim in actual.dims: - assert (actual[dim] == maxdemand[dim]).all() - assert np.abs(actual - 0.9 * maxdemand).max() < 1e-8 - - -def test_costed_production_single_year(market, capacity, technologies): - from muse.costs import annual_levelized_cost_of_energy - from muse.quantities import ( - costed_production, - maximum_production, - ) - from muse.utilities import broadcast_techs - - capacity = capacity.sel(year=2010) - market = market.sel(year=2010) - maxdemand = ( - xr.Dataset( - dict( - mp=maximum_production( - technologies, capacity, timeslices=market.timeslice - ) - ) - ) - .groupby("region") - .sum("asset") - .mp - ) - market["consumption"] = drop_timeslice(0.9 * maxdemand) - technodata = broadcast_techs(technologies, capacity) - costs = 
annual_levelized_cost_of_energy( - prices=market.prices.sel(region=technodata.region), technologies=technodata - ) - result = costed_production(market.consumption, costs, capacity, technologies) - assert isinstance(result, xr.DataArray) - actual = xr.Dataset(dict(r=result)).groupby("region").sum("asset").r - assert set(actual.dims) == set(maxdemand.dims) - for dim in actual.dims: - assert (actual[dim] == maxdemand[dim]).all() - assert np.abs(actual - 0.9 * maxdemand).max() < 1e-8 - - -def test_costed_production_over_capacity(market, capacity, technologies): - from muse.costs import annual_levelized_cost_of_energy - from muse.quantities import ( - costed_production, - maximum_production, - ) - from muse.utilities import broadcast_techs - - capacity = capacity.isel(asset=[0, 1, 2]) - if set(capacity.region.values) != set(market.region.values): - capacity.region.values[: len(set(market.region.values))] = list( - set(market.region.values) - ) - maxdemand = ( - xr.Dataset( - dict( - mp=maximum_production( - technologies, capacity, timeslices=market.timeslice - ) - ) - ) - .groupby("region") - .sum("asset") - .mp - ) - market["consumption"] = drop_timeslice(maxdemand * 0.9) - technodata = broadcast_techs(technologies, capacity) - costs = annual_levelized_cost_of_energy( - prices=market.prices.sel(region=technodata.region), technologies=technodata - ) - result = costed_production(market.consumption, costs, capacity, technologies) - assert isinstance(result, xr.DataArray) - actual = xr.Dataset(dict(r=result)).groupby("region").sum("asset").r - assert set(actual.dims) == set(maxdemand.dims) - for dim in actual.dims: - assert (actual[dim] == maxdemand[dim]).all() - assert np.abs(actual - 0.9 * maxdemand).max() < 1e-8 - - -def test_costed_production_with_minimum_service(market, capacity, technologies, rng): - from muse.costs import annual_levelized_cost_of_energy - from muse.quantities import ( - costed_production, - maximum_production, - ) - from muse.utilities import 
broadcast_techs - - if set(capacity.region.values) != set(market.region.values): - capacity.region.values[: len(set(market.region.values))] = list( - set(market.region.values) - ) - technologies["minimum_service_factor"] = ( - technologies.utilization_factor.dims, - rng.uniform(low=0.5, high=0.9, size=technologies.utilization_factor.shape), - ) - maxprod = maximum_production(technologies, capacity, timeslices=market.timeslice) - minprod = maxprod * broadcast_techs(technologies.minimum_service_factor, maxprod) - maxdemand = xr.Dataset(dict(mp=minprod)).groupby("region").sum("asset").mp - market["consumption"] = drop_timeslice(maxdemand * 0.9) - technodata = broadcast_techs(technologies, capacity) - costs = annual_levelized_cost_of_energy( - prices=market.prices.sel(region=technodata.region), technologies=technodata - ) - result = costed_production(market.consumption, costs, capacity, technologies) - assert isinstance(result, xr.DataArray) - actual = xr.Dataset(dict(r=result)).groupby("region").sum("asset").r - assert set(actual.dims) == set(maxdemand.dims) - for dim in actual.dims: - assert (actual[dim] == maxdemand[dim]).all() - assert (actual >= 0.9 * maxdemand - 1e-8).all() - assert (result >= minprod - 1e-8).all() - - def test_min_production(technologies, capacity, timeslice): """Test minimum production quantity.""" from muse.quantities import maximum_production, minimum_production # If no minimum service factor is defined, the minimum production is zero assert "minimum_service_factor" not in technologies - production = minimum_production(technologies, capacity, timeslice) + production = minimum_production(technologies, capacity) assert (production == 0).all() # If minimum service factor is defined, then the minimum production is not zero # and it is less than the maximum production technologies["minimum_service_factor"] = 0.5 - production = minimum_production(technologies, capacity, timeslice) + production = minimum_production(technologies, capacity) assert not 
(production == 0).all() - assert (production <= maximum_production(technologies, capacity, timeslice)).all() + assert (production <= maximum_production(technologies, capacity)).all() def test_supply_capped_by_min_service(technologies, capacity, timeslice): @@ -621,7 +394,7 @@ def test_supply_capped_by_min_service(technologies, capacity, timeslice): from muse.quantities import minimum_production, supply technologies["minimum_service_factor"] = 0.3 - minprod = minimum_production(technologies, capacity, timeslice) + minprod = minimum_production(technologies, capacity) # If minimum service factor is defined, then the minimum production is not zero assert not (minprod == 0).all() diff --git a/tests/test_readers.py b/tests/test_readers.py index 2d2efe0c1..828020d22 100644 --- a/tests/test_readers.py +++ b/tests/test_readers.py @@ -22,21 +22,6 @@ def user_data_files(settings: dict) -> None: new_file.write_text("Some data") -@fixture -def sectors_files(settings: dict): - """Creates the files related to the sector.""" - for data in settings["sectors"].values(): - for path in data.values(): - if not isinstance(path, (Path, str)): - continue - path = Path(path) - if path.suffix != ".csv": - continue - - path.parent.mkdir(parents=True, exist_ok=True) - path.write_text("Some data") - - @fixture def plugins(settings: dict, tmp_path) -> Path: """Creates the files related to the custom modules.""" @@ -48,21 +33,6 @@ def plugins(settings: dict, tmp_path) -> Path: return plugin -@fixture -def input_file(settings: dict, tmpdir, plugins, user_data_files, sectors_files) -> Path: - """Creates a whole set of MUSE input files in a temporary directory. - - This fixture creates a temporal directory with all the folders and files required - for a successful run of the read_settings function. 
- """ - # Finally we create the settings file - input_file = tmpdir.join("settings.toml") - with open(input_file, "w") as f: - toml.dump(settings, f) - - return input_file - - def test_add_known_parameters(settings: dict): """Test the add_known_parameters function. @@ -134,13 +104,6 @@ def test_check_foresight(settings: dict): check_foresight(settings) -def test_check_time_slices(settings: dict): - """Tests the check_budget_parameters function.""" - from muse.readers.toml import check_time_slices - - check_time_slices(settings) - - def test_check_global_data_files(settings: dict, user_data_files): """Tests the check_global_data_files function.""" from muse.readers.toml import check_global_data_files @@ -491,8 +454,8 @@ def test_read_technodata_timeslices(tmp_path): assert isinstance(data, xr.Dataset) assert set(data.dims) == {"technology", "region", "year", "timeslice"} assert dict(data.dtypes) == dict( - utilization_factor=np.float64, - minimum_service_factor=np.float64, + utilization_factor=np.int64, + minimum_service_factor=np.int64, ) assert list(data.coords["technology"].values) == ["gasCCGT", "windturbine"] assert list(data.coords["region"].values) == ["R1"] @@ -609,11 +572,9 @@ def test_read_csv_agent_parameters(default_model): def test_read_initial_market(default_model): from muse.readers.csv import read_initial_market - from muse.readers.toml import read_settings - settings = read_settings(default_model / "settings.toml") path = default_model / "input" / "Projections.csv" - data = read_initial_market(path, timeslices=settings.timeslices) + data = read_initial_market(path) assert isinstance(data, xr.Dataset) assert set(data.dims) == {"region", "year", "commodity", "timeslice"} diff --git a/tests/test_subsector.py b/tests/test_subsector.py index 244f80bd1..3994ece31 100644 --- a/tests/test_subsector.py +++ b/tests/test_subsector.py @@ -1,4 +1,3 @@ -from collections.abc import Sequence from unittest.mock import MagicMock, patch import xarray as xr @@ -48,7 
+47,7 @@ def test_subsector_investing_aggregation(): subsector = Subsector(agents, commodities) initial_agents = deepcopy(agents) assert {agent.year for agent in agents} == {int(market.year.min())} - assert subsector.aggregate_lp(technologies, market) is None + subsector.aggregate_lp(technologies, market, time_period=5, current_year=5) assert {agent.year for agent in agents} == {int(market.year.min() + 5)} for initial, final in zip(initial_agents, agents): assert initial.assets.sum() != final.assets.sum() @@ -105,20 +104,7 @@ def test_subsector_noninvesting_aggregation(market, model, technologies, tmp_pat commodity=technologies.commodity, region=technologies.region ).interp(year=[2020, 2025]) assert all(agent.year == 2020 for agent in agents) - result = subsector.aggregate_lp(technologies, market) - - assert result is not None - assert len(result) == 2 - - lpcosts, lpconstraints = result - assert isinstance(lpcosts, xr.Dataset) - assert {"search_space", "decision"} == set(lpcosts.data_vars) - assert "agent" in lpcosts.coords - assert isinstance(lpconstraints, Sequence) - assert len(lpconstraints) == 1 - assert all(isinstance(u, xr.Dataset) for u in lpconstraints) - # makes sure agent investment got called - assert all(agent.year == 2025 for agent in agents) + subsector.aggregate_lp(technologies, market, time_period=5, current_year=2020) def test_factory_smoke_test(model, technologies, tmp_path): diff --git a/tests/test_timeslices.py b/tests/test_timeslices.py index 7f478d027..0a3ae55a2 100644 --- a/tests/test_timeslices.py +++ b/tests/test_timeslices.py @@ -1,119 +1,20 @@ """Test timeslice utilities.""" -from pytest import approx, fixture +import numpy as np +from pytest import approx, fixture, raises from xarray import DataArray -from muse.timeslices import QuantityType, convert_timeslice - - -@fixture -def toml(): - return """ - ["timeslices"] - winter.weekday.day = 10 - winter.weekday.night = 5 - winter.weekend.day = 2 - winter.weekend.night = 2 - 
winter.weekend.dusk = 1 - summer.weekday.day = 5 - summer.weekday.night = 5 - summer.weekend.day = 2 - summer.weekend.night = 2 - summer.weekend.dusk = 1 - level_names = ["semester", "week", "day"] - [timeslices.aggregates] - allday = ["day", "night"] - """ - - -@fixture -def reference(toml): - from muse.timeslices import reference_timeslice - - return reference_timeslice(toml) - @fixture -def transforms(toml, reference): - from muse.timeslices import aggregate_transforms - - return aggregate_transforms(toml, reference) +def non_timesliced_dataarray(): + return DataArray([1, 2, 3], dims=["x"]) @fixture -def timeslice_dataarray(reference): - from pandas import MultiIndex - - return DataArray( - [1, 2, 3], - coords={ - "timeslice": MultiIndex.from_tuples( - [ - ("winter", "weekday", "allday"), - ("winter", "weekend", "dusk"), - ("summer", "weekend", "night"), - ], - names=reference.get_index("timeslice").names, - ) - }, - dims="timeslice", - ) - - -def test_convert_extensive_timeslice(reference, timeslice_dataarray, transforms): - z = convert_timeslice( - timeslice_dataarray, reference, finest=reference, transforms=transforms - ) - assert z.shape == reference.shape - assert z.values == approx( - [ - float( - timeslice_dataarray[0] * reference[0] / (reference[0] + reference[1]) - ), - float( - timeslice_dataarray[0] * reference[1] / (reference[0] + reference[1]) - ), - 0, - 0, - float(timeslice_dataarray[1]), - 0, - 0, - 0, - float(timeslice_dataarray[2]), - 0, - ] - ) - - -def test_convert_intensive_timeslice(reference, timeslice_dataarray, transforms): - z = convert_timeslice( - timeslice_dataarray, - reference, - finest=reference, - transforms=transforms, - quantity=QuantityType.INTENSIVE, - ) - - assert z.values == approx( - [ - float(timeslice_dataarray[0]), - float(timeslice_dataarray[0]), - 0, - 0, - float(timeslice_dataarray[1]), - 0, - 0, - 0, - float(timeslice_dataarray[2]), - 0, - ] - ) - - -def test_reference_timeslice(): +def timeslice(): from toml import 
loads - from muse.timeslices import reference_timeslice + from muse.timeslices import read_timeslices inputs = loads( """ @@ -139,18 +40,19 @@ def test_reference_timeslice(): """ ) - ts = reference_timeslice(inputs) + ts = read_timeslices(inputs) assert isinstance(ts, DataArray) assert "timeslice" in ts.coords + return ts def test_no_overlap(): from pytest import raises - from muse.timeslices import reference_timeslice + from muse.timeslices import read_timeslices with raises(ValueError): - reference_timeslice( + read_timeslices( """ [timeslices] winter.weekday.night = 396 @@ -163,106 +65,224 @@ def test_no_overlap(): ) -def test_aggregate_transforms_no_aggregates(): - from itertools import product +def test_drop_timeslice(non_timesliced_dataarray, timeslice): + from muse.timeslices import broadcast_timeslice, drop_timeslice - from numpy import ndarray, zeros + # Test on array with timeslice data + timesliced_dataarray = broadcast_timeslice(non_timesliced_dataarray, ts=timeslice) + dropped = drop_timeslice(timesliced_dataarray) + coords_to_check = {"timeslice", "month", "day", "hour"} + assert coords_to_check.issubset(timesliced_dataarray.coords) + assert not coords_to_check.intersection(dropped.coords) - from muse.timeslices import aggregate_transforms, reference_timeslice + # Test on arrays without timeslice data + assert drop_timeslice(non_timesliced_dataarray).equals(non_timesliced_dataarray) + assert drop_timeslice(dropped).equals(dropped) - reference = reference_timeslice( - """ - [timeslices] - spring.weekday = 396 - spring.weekend = 396 - autumn.weekday = 396 - autumn.weekend = 156 - """ - ) - vectors = aggregate_transforms(timeslice=reference) - assert isinstance(vectors, dict) - assert set(vectors) == set(product(["spring", "autumn"], ["weekday", "weekend"])) - for i in range(reference.shape[0]): - index = reference.timeslice[i].values.tolist() - vector = vectors[index] - assert isinstance(vector, ndarray) - expected = zeros(reference.shape, dtype=int) - 
expected[i] = 1 - assert vector == approx(expected) +def test_broadcast_timeslice(non_timesliced_dataarray, timeslice): + from muse.timeslices import broadcast_timeslice, compress_timeslice + # Broadcast array to different levels of granularity + for level in ["month", "day", "hour"]: + out = broadcast_timeslice(non_timesliced_dataarray, ts=timeslice, level=level) + target_timeslices = compress_timeslice( + timeslice, ts=timeslice, level=level, operation="sum" + ) -def test_aggregate_transforms_with_aggregates(): - from itertools import product + # Check that timeslicing in output matches the global scheme + assert out.timeslice.equals(target_timeslices.timeslice) - from toml import loads + # Check that all timeslices in the output are equal to each other + assert (out.diff(dim="timeslice") == 0).all() - from muse.timeslices import aggregate_transforms, reference_timeslice + # Check that all values in the output are equal to the input + assert all( + (out.isel(timeslice=i) == non_timesliced_dataarray).all() + for i in range(out.sizes["timeslice"]) + ) - toml = loads( - """ - [timeslices] - spring.weekday.day = 396 - spring.weekday.night = 396 - spring.weekend.day = 156 - spring.weekend.night = 156 - summer.weekday.day = 396 - summer.weekday.night = 396 - summer.weekend.day = 156 - summer.weekend.night = 156 - autumn.weekday.day = 396 - autumn.weekday.night = 396 - autumn.weekend.day = 156 - autumn.weekend.night = 156 - winter.weekday.day = 396 - winter.weekday.night = 396 - winter.weekend.day = 156 - winter.weekend.night = 156 + # Calling on a fully timesliced array: the input should be returned unchanged + out2 = broadcast_timeslice(out, ts=timeslice) + assert out2.equals(out) - [timeslices.aggregates] - springautumn = ["spring", "autumn"] - allday = ["day", "night"] - week = ["weekday", "weekend"] - """ - ) - reference = reference_timeslice(toml) - - vectors = aggregate_transforms(toml, reference) - assert isinstance(vectors, dict) - assert set(vectors) == set( - 
product( - ["winter", "spring", "summer", "autumn", "springautumn"], - ["weekend", "weekday", "week"], - ["day", "night", "allday"], + # Calling on an array with inappropriate timeslicing: ValueError should be raised + with raises(ValueError): + broadcast_timeslice( + compress_timeslice(out, ts=timeslice, level="day"), ts=timeslice ) + + +def test_distribute_timeslice(non_timesliced_dataarray, timeslice): + from muse.timeslices import ( + broadcast_timeslice, + compress_timeslice, + distribute_timeslice, ) - def to_bitstring(x): - return "".join(x.astype(str)) + # Distribute array to different levels of granularity + for level in ["month", "day", "hour"]: + out = distribute_timeslice(non_timesliced_dataarray, ts=timeslice, level=level) + target_timeslices = compress_timeslice( + timeslice, ts=timeslice, level=level, operation="sum" + ) + + # Check that timeslicing in output matches the global scheme + assert out.timeslice.equals(target_timeslices.timeslice) + + # Check that all values are proportional to timeslice lengths + out_proportions = out / broadcast_timeslice( + out.sum("timeslice"), ts=timeslice, level=level + ) + ts_proportions = target_timeslices / broadcast_timeslice( + target_timeslices.sum("timeslice"), ts=timeslice, level=level + ) + assert abs(out_proportions - ts_proportions).max() < 1e-6 + + # Check that the sum across timeslices is equal to the input + assert (out.sum("timeslice") == approx(non_timesliced_dataarray)).all() - assert to_bitstring(vectors[("spring", "weekday", "night")]) == "0100000000000000" - assert to_bitstring(vectors[("autumn", "weekday", "night")]) == "0000000001000000" - assert to_bitstring(vectors[("spring", "weekend", "night")]) == "0001000000000000" - assert to_bitstring(vectors[("autumn", "weekend", "night")]) == "0000000000010000" - assert ( - to_bitstring(vectors[("springautumn", "weekday", "night")]) - == "0100000001000000" + # Calling on a fully timesliced array: the input should be returned unchanged + out2 = 
distribute_timeslice(out, ts=timeslice) + assert out2.equals(out) + + # Calling on an array with inappropraite timeslicing: ValueError should be raised + with raises(ValueError): + distribute_timeslice( + compress_timeslice(out, ts=timeslice, level="day"), ts=timeslice + ) + + +def test_compress_timeslice(non_timesliced_dataarray, timeslice): + from muse.timeslices import broadcast_timeslice, compress_timeslice, get_level + + # Create timesliced dataarray for compressing + timesliced_dataarray = broadcast_timeslice(non_timesliced_dataarray, ts=timeslice) + + # Compress array to different levels of granularity + for level in ["month", "day", "hour"]: + # Sum operation + out = compress_timeslice( + timesliced_dataarray, ts=timeslice, operation="sum", level=level + ) + assert get_level(out) == level + assert ( + out.sum("timeslice") == approx(timesliced_dataarray.sum("timeslice")) + ).all() + + # Mean operation + out = compress_timeslice( + timesliced_dataarray, ts=timeslice, operation="mean", level=level + ) + assert get_level(out) == level + assert ( + out.mean("timeslice") == approx(timesliced_dataarray.mean("timeslice")) + ).all() # NB in general this should be a weighted mean, but this works here + # because the data is equal in every timeslice + + # Calling without specifying a level: the input should be returned unchanged + out = compress_timeslice(timesliced_dataarray, ts=timeslice) + assert out.equals(timesliced_dataarray) + + # Calling with an invalid level: ValueError should be raised + with raises(ValueError): + compress_timeslice(timesliced_dataarray, ts=timeslice, level="invalid") + + # Calling with an invalid operation: ValueError should be raised + with raises(ValueError): + compress_timeslice( + timesliced_dataarray, ts=timeslice, level="day", operation="invalid" + ) + + +def test_expand_timeslice(non_timesliced_dataarray, timeslice): + from muse.timeslices import broadcast_timeslice, expand_timeslice + + # Different starting points for expansion + 
for level in ["month", "day", "hour"]: + timesliced_dataarray = broadcast_timeslice( + non_timesliced_dataarray, ts=timeslice, level=level + ) + + # Broadcast operation + out = expand_timeslice( + timesliced_dataarray, ts=timeslice, operation="broadcast" + ) + assert out.timeslice.equals(timeslice.timeslice) + assert ( + out.mean("timeslice") == approx(timesliced_dataarray.mean("timeslice")) + ).all() + + # Distribute operation + out = expand_timeslice( + timesliced_dataarray, ts=timeslice, operation="distribute" + ) + assert out.timeslice.equals(timeslice.timeslice) + assert ( + out.sum("timeslice") == approx(timesliced_dataarray.sum("timeslice")) + ).all() + + # Calling on an already expanded array: the input should be returned unchanged + out2 = expand_timeslice(out, ts=timeslice) + assert out.equals(out2) + + # Calling with an invalid operation: ValueError should be raised + with raises(ValueError): + timesliced_dataarray = broadcast_timeslice( + non_timesliced_dataarray, ts=timeslice, level="month" + ) + expand_timeslice(timesliced_dataarray, ts=timeslice, operation="invalid") + + +def test_get_level(non_timesliced_dataarray, timeslice): + from muse.timeslices import broadcast_timeslice, get_level + + for level in ["month", "day", "hour"]: + timesliced_dataarray = broadcast_timeslice( + non_timesliced_dataarray, ts=timeslice, level=level + ) + assert get_level(timesliced_dataarray) == level + + # Should raise error with non-timesliced array + with raises(ValueError): + get_level(non_timesliced_dataarray) + + +def test_sort_timeslices(non_timesliced_dataarray, timeslice): + from muse.timeslices import broadcast_timeslice, sort_timeslices + + # Finest timeslice level -> should match ordering of `timeslice` + timesliced_dataarray = broadcast_timeslice( + non_timesliced_dataarray, ts=timeslice, level="hour" ) - assert to_bitstring(vectors[("spring", "week", "night")]) == "0101000000000000" - assert ( - to_bitstring(vectors[("springautumn", "week", "night")]) == 
"0101000001010000" + sorted = sort_timeslices(timesliced_dataarray, timeslice) + assert sorted.timeslice.equals(timeslice.timeslice) + assert not sorted.timeslice.equals( + timesliced_dataarray.sortby("timeslice").timeslice + ) # but could be true if the timeslices in `timeslice` are in alphabetical order + + # Coarser timeslice level -> should match xarray sortby + timesliced_dataarray = broadcast_timeslice( + non_timesliced_dataarray, ts=timeslice, level="month" ) + sorted = sort_timeslices(timesliced_dataarray, timeslice) + assert sorted.timeslice.equals(timesliced_dataarray.sortby("timeslice").timeslice) -def test_drop_timeslice(timeslice_dataarray): - from muse.timeslices import drop_timeslice +def test_timeslice_max(non_timesliced_dataarray): + from muse.timeslices import broadcast_timeslice, read_timeslices, timeslice_max - dropped = drop_timeslice(timeslice_dataarray) - coords_to_check = {"timeslice", "semester", "week", "day"} - assert coords_to_check.issubset(timeslice_dataarray.coords) - assert not coords_to_check.intersection(dropped.coords) - - # Test on arrays without timeslice data - data_without_timeslice = DataArray([1, 2, 3], dims=["x"]) - assert drop_timeslice(data_without_timeslice).equals(data_without_timeslice) - assert drop_timeslice(dropped).equals(dropped) + # With two equal timeslice lengths, this should be equivalent to max * 2 + ts = read_timeslices( + """ + [timeslices] + winter.weekday.night = 396 + winter.weekday.morning = 396 + """ + ) + timesliced_dataarray = broadcast_timeslice(non_timesliced_dataarray, ts=ts) + timesliced_dataarray = timesliced_dataarray + np.random.rand( + *timesliced_dataarray.shape + ) + timeslice_max_dataarray = timeslice_max(timesliced_dataarray, ts=ts) + assert timeslice_max_dataarray.equals(timesliced_dataarray.max("timeslice") * 2) diff --git a/tests/test_trade.py b/tests/test_trade.py index bafa07db9..2398b47d9 100644 --- a/tests/test_trade.py +++ b/tests/test_trade.py @@ -102,14 +102,13 @@ def 
test_lp_costs(): technologies = examples.technodata("power", model="trade") search_space = examples.search_space("power", model="trade") - timeslices = examples.sector("power", model="trade").timeslices costs = ( search_space * np.arange(np.prod(search_space.shape)).reshape(search_space.shape) * xr.ones_like(technologies.dst_region) ) - lpcosts = lp_costs(technologies.sel(year=2020, drop=True), costs, timeslices) + lpcosts = lp_costs(technologies.sel(year=2020, drop=True), costs) assert "capacity" in lpcosts.data_vars assert "production" in lpcosts.data_vars assert set(lpcosts.capacity.dims) == {"agent", "replacement", "dst_region"} diff --git a/tests/test_utilities.py b/tests/test_utilities.py index f5d3ab63a..4dd15af4d 100644 --- a/tests/test_utilities.py +++ b/tests/test_utilities.py @@ -1,6 +1,6 @@ import numpy as np import xarray as xr -from pytest import approx, mark +from pytest import approx, mark, raises def make_array(array): @@ -37,7 +37,7 @@ def test_reduce_assets_with_zero_size(capacity: xr.DataArray): x = capacity.sel(asset=[]) actual = reduce_assets(x) - assert actual is x + assert (actual == x).all() def test_broadcast_tech(technologies, capacity): @@ -296,3 +296,24 @@ def test_avoid_repetitions(): assert 3 * len(result.year) == 2 * len(assets.year) original = result.interp(year=assets.year, method="linear") assert (original == assets).all() + + +def test_check_dimensions(): + from muse.utilities import check_dimensions + + data = xr.DataArray( + np.random.rand(4, 5), + dims=["dim1", "dim2"], + coords={"dim1": range(4), "dim2": range(5)}, + ) + + # Valid + check_dimensions(data, required=["dim1"], optional=["dim2"]) + + # Missing required + with raises(ValueError, match="Missing required dimensions"): + check_dimensions(data, required=["dim1", "dim3"], optional=["dim2"]) + + # Extra dimension + with raises(ValueError, match="Extra dimensions"): + check_dimensions(data, required=["dim1"])