diff --git a/.github/workflows/tests-coverage.yml b/.github/workflows/tests-coverage.yml index e0a72b1c..6bcc81f6 100644 --- a/.github/workflows/tests-coverage.yml +++ b/.github/workflows/tests-coverage.yml @@ -20,13 +20,16 @@ jobs: include: - name-suffix: "coverage" os: ubuntu-latest - python-version: 3.8 + python-version: 3.9 - name-suffix: "basic" os: ubuntu-latest - python-version: 3.9 + python-version: "3.10" + - name-suffix: "basic" + os: ubuntu-latest + python-version: 3.11 - name-suffix: "basic" os: windows-latest - python-version: 3.8 + python-version: 3.9 steps: - name: Checkout repo @@ -38,7 +41,7 @@ jobs: python-version: ${{ matrix.python-version }} - name: Set up julia - if: runner.os == 'Linux' && matrix.python-version == 3.8 && matrix.name-suffix == 'coverage' + if: runner.os == 'Linux' uses: julia-actions/setup-julia@v1 with: version: "1.6" @@ -58,14 +61,20 @@ jobs: environment-file: eDisGo_env_dev.yml python-version: ${{ matrix.python-version }} - - name: Run tests - if: ${{ !(runner.os == 'Linux' && matrix.python-version == 3.8 && matrix.name-suffix == 'coverage') }} + - name: Run tests Linux + if: runner.os == 'Linux' && matrix.name-suffix != 'coverage' + run: | + python -m pip install pytest pytest-notebook + python -m pytest --runslow --runonlinux --disable-warnings --color=yes -v + + - name: Run tests Windows + if: runner.os == 'Windows' run: | python -m pip install pytest pytest-notebook python -m pytest --runslow --disable-warnings --color=yes -v - name: Run tests, coverage and send to coveralls - if: runner.os == 'Linux' && matrix.python-version == 3.8 && matrix.name-suffix == 'coverage' + if: runner.os == 'Linux' && matrix.python-version == 3.9 && matrix.name-suffix == 'coverage' run: | pip install pytest pytest-notebook coveralls coverage run --source=edisgo -m pytest --runslow --runonlinux --disable-warnings --color=yes -v diff --git a/doc/conf.py b/doc/conf.py index 32f385cb..30cafa8a 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -71,8 
+71,6 @@ ] # Files to ignore when building api documentation autoapi_ignore = [ - "*/flex_opt/curtailment.py", - "*/flex_opt/storage_positioning.py", "*/opf/timeseries_reduction.py", "*/opf/opf_solutions/*", ] @@ -116,7 +114,7 @@ def setup(sphinx): "networkx.%s", ), "sqlalchemy": ( - "http://docs.sqlalchemy.org/en/latest/core/connections.html#%s", + "https://docs.sqlalchemy.org/en/latest/core/connections.html#%s", "sqlalchemy.%s", ), "numpy": ( @@ -128,12 +126,24 @@ def setup(sphinx): "shapely.%s", ), "ding0": ("https://dingo.readthedocs.io/en/dev/api/ding0.html#%s", "ding0.%s"), - "pypsa": ("https://pypsa.readthedocs.io/en/latest/components.html#%s", "pypsa.%s"), + "pypsa": ( + "https://pypsa.readthedocs.io/en/latest/user-guide/components.html#%s", + "pypsa.%s", + ), "plotly": ( "https://plotly.com/python-api-reference/generated/%s.html", "plotly.%s", ), } +# ignore the following external links when checking the links +# stackoverflow and gurobi is listed here because for some reason +# the link check fails for these +# in the github action, even though the link is correct +linkcheck_ignore = [ + r"https://stackoverflow.com*", + r"https://support.gurobi.com/*", +] + # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] diff --git a/doc/dev_notes.rst b/doc/dev_notes.rst index e012ce87..5d7764e4 100644 --- a/doc/dev_notes.rst +++ b/doc/dev_notes.rst @@ -18,13 +18,12 @@ Installation using Linux ~~~~~~~~~~~~~~~~~~~~~~~~ To set up a source installation using linux simply use a virtual environment and install -the source code with pip. Make sure to use python3.7 or higher (recommended -python3.8). **After** setting up your virtual environment and activating it run the +the source code with pip. Make sure to use python3.9 or higher. **After** setting up your virtual environment and activating it run the following commands within your eDisGo directory: .. 
code-block:: bash - python -m pip install -e .[full] # install eDisGo from source + python -m pip install -e .[dev] # install eDisGo from source pre-commit install # install pre-commit hooks diff --git a/doc/index.rst b/doc/index.rst index dfebb436..444e35f0 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -15,7 +15,7 @@ The toolbox currently includes: * `ding0 `_ tool for synthetic medium and low voltage grid topologies for the whole of Germany - * `OpenEnergy DataBase (oedb) `_ for + * `OpenEnergy DataBase (oedb) `_ for feed-in time series of fluctuating renewables and scenarios for future power plant park of Germany * `demandlib `_ for electrical load time series diff --git a/doc/quickstart.rst b/doc/quickstart.rst index 21d38303..bc537cc4 100644 --- a/doc/quickstart.rst +++ b/doc/quickstart.rst @@ -3,11 +3,11 @@ Getting started ================ +.. warning:: Make sure to use python 3.9 or higher! + Installation using Linux ------------------------- -.. warning:: Make sure to use python 3.8 or higher! - Install latest eDisGo version through pip. Therefore, we highly recommend using a virtual environment and its pip. @@ -21,8 +21,6 @@ You may also consider installing a developer version as detailed in Installation using Windows -------------------------- -.. warning:: Make sure to use python 3.8 or higher! - For Windows users we recommend using Anaconda and to install the geo stack using the conda-forge channel prior to installing eDisGo. You may use the provided `eDisGo_env.yml file `_ @@ -44,117 +42,29 @@ Installation using MacOS We don't have any experience with our package on MacOS yet! If you try eDisGo on MacOS we would be happy if you let us know about your experience! -Requirements for edisgoOPF package ----------------------------------- - -.. warning:: The non-linear optimal power flow is currently not maintained and might not work out of the box! 
- -To use the multiperiod optimal power flow that is provided in the julia package -edisgoOPF in eDisGo you additionally need to install julia version 1.1.1. -Download julia from -`julia download page `_ and -add it to your path (see -`platform specific instructions `_ -for more information). - -Before using the edisgoOPF julia package for the first time you need to -instantiate it. Therefore, in a terminal change directory to the edisgoOPF -package located in eDisGo/edisgo/opf/edisgoOPF and call julia from there. -Change to package mode by typing - -.. code-block:: bash - - ] - -Then activate the package: - -.. code-block:: bash - - (v1.0) pkg> activate . - -And finally instantiate it: - -.. code-block:: bash - - (SomeProject) pkg> instantiate - -.. _prerequisites: - -Additional linear solver -^^^^^^^^^^^^^^^^^^^^^^^^^ - -As with the default linear solver in Ipopt (local solver used in the OPF) -the limit for prolem sizes is reached quite quickly, you may want to instead use -the solver HSL_MA97. -The steps required to set up HSL are also described in the -`Ipopt Documentation `_. -Here is a short version for reference: - -First, you need to obtain an academic license for HSL Solvers. -Under https://www.hsl.rl.ac.uk/ipopt/ download the sources for Coin-HSL Full (Stable). -You will need to provide an institutional e-mail to gain access. - -Unpack the tar.gz: +Additional requirements for Optimal Power Flow +--------------------------------------------------- -.. code-block:: bash - - tar -xvzf coinhsl-2014.01.10.tar.gz - -To install the solver, clone the Ipopt Third Party HSL tools: - -.. code-block:: bash - - git clone https://github.com/coin-or-tools/ThirdParty-HSL.git - cd ThirdParty-HSL - - -Under `ThirdParty-HSL`, create a folder for the HSL sources named `coinhsl` and -copy the contents of the HSL archive into it. -Under Ubuntu, you'll need BLAS, LAPACK and GCC for Fortran. If you don't have them, install them via: - -.. 
code-block:: bash - - sudo apt-get install libblas-dev liblapack-dev gfortran +In order to use the optimal power flow, you additionally need: -You can then configure and install your HSL Solvers: +1. **Julia**: Version 1.6.7 is required. +2. **Gurobi Optimizer**: A powerful optimization solver. -.. code-block:: bash - - ./configure - make - sudo make install - -To make Ipopt pick up the solver, you need to add it to your path. -During install, there will be an output that tells you where the libraries have -been put. Usually like this: +Installation Steps +^^^^^^^^^^^^^^^^^^^ -.. code-block:: bash - - Libraries have been installed in: - /usr/local/lib +1. Install Julia 1.6.7 -Add this path to the variable `LD_LIBRARY_PATH`: +Download Julia 1.6.7 from the `Julia LTS releases page `_. -.. code-block:: bash - - export LD_LIBRARY="/usr/local/bin":$LD_LIBRARY_PATH - -You might also want to add this to your .bashrc to make it persistent. - -For some reason, Ipopt looks for a library named `libhsl.so`, which is not what -the file is named, so we'll also need to provide a symlink: - -.. code-block:: bash +Install Julia by following the instructions in the `Julia installation guide `_. Make sure to add Julia to your system path. - cd /usr/local/lib - ln -s libcoinhsl.so libhsl.so -MA97 should now work and can be called from Julia with: +2. Install Gurobi -.. code-block:: julia - JuMP.setsolver(pm.model,IpoptSolver(linear_solver="ma97")) +Follow the `Gurobi installation guide `_ to install Gurobi and add it to your system path. Prerequisites ------------- @@ -163,7 +73,7 @@ Beyond a running and up-to-date installation of eDisGo you need **grid topology data**. Currently synthetic grid data generated with the python project `Ding0 `_ is the only supported data source. You can retrieve data from -`Zenodo `_ +`Zenodo `_ (make sure you choose latest data) or check out the `Ding0 documentation `_ on how to generate grids yourself. 
@@ -188,8 +98,8 @@ Aside from grid topology data you may eventually need a dataset on future installation of power plants. You may therefore use the scenarios developed in the `open_eGo `_ project that are available in the -`OpenEnergy DataBase (oedb) `_ -hosted on the `OpenEnergy Platform (OEP) `_. +`OpenEnergy DataBase (oedb) `_ +hosted on the `OpenEnergy Platform (OEP) `_. eDisGo provides an interface to the oedb using the package `ego.io `_. ego.io gives you a python SQL-Alchemy representations of the oedb and access to it by using the diff --git a/doc/usage_details.rst b/doc/usage_details.rst index aac09982..a35ba1e2 100644 --- a/doc/usage_details.rst +++ b/doc/usage_details.rst @@ -241,7 +241,7 @@ This mode can be invoked as follows: For the following components you can use existing time series: * Fluctuating generators: Feed-in time series for solar and wind power plants can be - retrieved from the `OpenEnergy DataBase `_. + retrieved from the `OpenEnergy DataBase `_. * Conventional loads: Standard load profiles for the different sectors residential, commercial, agricultural and industrial are generated using the oemof `demandlib `_. 
diff --git a/doc/whatsnew/v0-3-0.rst b/doc/whatsnew/v0-3-0.rst index 3eeccb3c..7cf4110a 100644 --- a/doc/whatsnew/v0-3-0.rst +++ b/doc/whatsnew/v0-3-0.rst @@ -22,7 +22,9 @@ Changes * Added method to aggregate LV grid buses to station bus secondary side `#353 `_ * Adapted codebase to work with pandas 2.0 `#373 `_ * Added option to run reinforcement with reduced number of time steps `#379 `_ + (adapted in `#395 `_) * Added optimization method to determine dispatch of flexibilities that lead to minimal network expansion costs `#376 `_ * Added a new reinforcement method that separate lv grids when the overloading is very high `#380 `_ * Move function to assign feeder to Topology class and add methods to the Grid class to get information on the feeders `#360 `_ * Added a storage operation strategy where the storage is charged when PV feed-in is higher than electricity demand of the household and discharged when electricity demand exceeds PV generation `#386 `_ +* Added an estimation of the voltage deviation over a cable when selecting a suitable cable to connect a new component `#411 `_ diff --git a/eDisGo_env.yml b/eDisGo_env.yml index f3530124..c9f4e623 100644 --- a/eDisGo_env.yml +++ b/eDisGo_env.yml @@ -3,9 +3,9 @@ channels: - conda-forge - defaults dependencies: - - python >= 3.8, < 3.10 + - python >= 3.9, <= 3.11 - pip - - pandas >= 1.4 + - pandas >= 1.4, < 2.2.0 - conda-forge::fiona - conda-forge::geopy - conda-forge::geopandas @@ -16,6 +16,6 @@ dependencies: - conda-forge::pygeos - conda-forge::contextily - conda-forge::descartes - - conda-forge::pypsa >= 0.17.0, <= 0.20.1 + - conda-forge::pypsa == 0.26.2 - pip: - eDisGo diff --git a/eDisGo_env_dev.yml b/eDisGo_env_dev.yml index a5986609..859e3954 100644 --- a/eDisGo_env_dev.yml +++ b/eDisGo_env_dev.yml @@ -3,9 +3,9 @@ channels: - conda-forge - defaults dependencies: - - python >= 3.8, < 3.10 + - python >= 3.9, <= 3.11 - pip - - pandas >= 1.4 + - pandas >= 1.4, < 2.2.0 - conda-forge::fiona - conda-forge::geopy - 
conda-forge::geopandas @@ -16,6 +16,6 @@ dependencies: - conda-forge::pygeos - conda-forge::contextily - conda-forge::descartes - - conda-forge::pypsa >= 0.17.0, <= 0.20.1 + - conda-forge::pypsa == 0.26.2 - pip: - -e .[dev] diff --git a/edisgo/config/config_grid_default.cfg b/edisgo/config/config_grid_default.cfg index f48c68d8..1cd82925 100644 --- a/edisgo/config/config_grid_default.cfg +++ b/edisgo/config/config_grid_default.cfg @@ -50,6 +50,11 @@ upper_limit_voltage_level_6 = 0.2 upper_limit_voltage_level_5 = 5.5 upper_limit_voltage_level_4 = 20.0 +# from VDE-AR-N 4100 (VDE-AR-N 4100) Anwendungsregel: 2019-04, table 3 +lv_max_voltage_deviation = 0.03 +# from VDE-AR-N 4110 (VDE-AR-N 4110) Anwendungsregel: 2023-09, 5.3.2 Zulässige Spannungsänderung +mv_max_voltage_deviation = 0.02 + [disconnecting_point] # Positioning of disconnecting points: Can be position at location of most diff --git a/edisgo/config/config_timeseries_default.cfg b/edisgo/config/config_timeseries_default.cfg index bfb97351..84c6074b 100644 --- a/edisgo/config/config_timeseries_default.cfg +++ b/edisgo/config/config_timeseries_default.cfg @@ -88,16 +88,16 @@ lv_load_case_hp = 1.0 # =========================== # power factors used to generate reactive power time series for loads and generators -mv_gen = 0.9 -mv_load = 0.9 -mv_storage = 0.9 -mv_cp = 1.0 -mv_hp = 1.0 -lv_gen = 0.95 -lv_load = 0.95 -lv_storage = 0.95 -lv_cp = 1.0 -lv_hp = 1.0 +mv_generator = 0.9 +mv_conventional_load = 0.9 +mv_storage_unit = 0.9 +mv_charging_point = 1.0 +mv_heat_pump = 1.0 +lv_generator = 0.95 +lv_conventional_load = 0.95 +lv_storage_unit = 0.95 +lv_charging_point = 1.0 +lv_heat_pump = 1.0 [reactive_power_mode] @@ -105,16 +105,16 @@ lv_hp = 1.0 # =========================== # power factor modes used to generate reactive power time series for loads and generators -mv_gen = inductive -mv_load = inductive -mv_storage = inductive -mv_cp = inductive -mv_hp = inductive -lv_gen = inductive -lv_load = inductive -lv_storage 
= inductive -lv_cp = inductive -lv_hp = inductive +mv_generator = inductive +mv_conventional_load = inductive +mv_storage_unit = inductive +mv_charging_point = inductive +mv_heat_pump = inductive +lv_generator = inductive +lv_conventional_load = inductive +lv_storage_unit = inductive +lv_charging_point = inductive +lv_heat_pump = inductive [demandlib] @@ -129,6 +129,8 @@ week_day = 0.8 week_night = 0.6 weekend_day = 0.6 weekend_night = 0.6 +holiday_day = 0.6 +holiday_night = 0.6 # tuple specifying the beginning/end of a workday (e.g. 18:00) day_start = 6:00 day_end = 22:00 diff --git a/edisgo/edisgo.py b/edisgo/edisgo.py index a8e3d2cf..ebe2e36f 100755 --- a/edisgo/edisgo.py +++ b/edisgo/edisgo.py @@ -417,7 +417,7 @@ def set_time_series_active_power_predefined( Technology- and weather cell-specific hourly feed-in time series are obtained from the `OpenEnergy DataBase - `_. See + `_. See :func:`edisgo.io.timeseries_import.feedin_oedb` for more information. This option requires that the parameter `engine` is provided in case @@ -478,7 +478,7 @@ def set_time_series_active_power_predefined( Sets active power demand time series using individual hourly electricity load time series for one year obtained from the `OpenEnergy DataBase - `_. + `_. This option requires that the parameters `engine` and `scenario` are provided. For further settings, the parameter `timeindex` can also be @@ -933,7 +933,7 @@ def import_generators(self, generator_scenario=None, **kwargs): Gets generator park for specified scenario and integrates generators into grid. The generator data is retrieved from the - `open energy platform `_. Decommissioned + `open energy platform `_. Decommissioned generators are removed from the grid, generators with changed capacity updated and new generators newly integrated into the grid. 
@@ -998,12 +998,12 @@ def analyze( range_num: int = 10, scale_timeseries: float | None = None, **kwargs, - ) -> tuple[pd.DataFrame, pd.DataFrame]: + ) -> tuple[pd.DatetimeIndex, pd.DatetimeIndex]: """ Conducts a static, non-linear power flow analysis. Conducts a static, non-linear power flow analysis using - `PyPSA `_ and writes results (active, reactive and apparent power as well as current on lines and voltages at buses) to :class:`~.network.results.Results` @@ -1196,6 +1196,7 @@ def _scale_timeseries(pypsa_network_copy, fraction): def reinforce( self, timesteps_pfa: str | pd.DatetimeIndex | pd.Timestamp | None = None, + reduced_analysis: bool = False, copy_grid: bool = False, max_while_iterations: int = 20, split_voltage_band: bool = True, @@ -1237,14 +1238,15 @@ def reinforce( time steps. If your time series already represents the worst-case, keep the default value of None because finding the worst-case snapshots takes some time. - * 'reduced_analysis' - Reinforcement is conducted for all time steps at which at least one - branch shows its highest overloading or one bus shows its highest voltage - violation. * :pandas:`pandas.DatetimeIndex` or \ :pandas:`pandas.Timestamp` Use this option to explicitly choose which time steps to consider. - + reduced_analysis : bool + If True, reinforcement is conducted for all time steps at which at least + one branch shows its highest overloading or one bus shows its highest + voltage violation. Time steps to consider are specified through parameter + `timesteps_pfa`. If False, all time steps in parameter `timesteps_pfa` + are used. Default: False. copy_grid : bool If True, reinforcement is conducted on a copied grid and discarded. Default: False. @@ -1301,26 +1303,43 @@ def reinforce( reinforce MV/LV stations for LV worst-cases. Default: False. 
num_steps_loading : int - In case `timesteps_pfa` is set to 'reduced_analysis', this parameter can be + In case `reduced_analysis` is set to True, this parameter can be used to specify the number of most critical overloading events to consider. If None, `percentage` is used. Default: None. num_steps_voltage : int - In case `timesteps_pfa` is set to 'reduced_analysis', this parameter can be + In case `reduced_analysis` is set to True, this parameter can be used to specify the number of most critical voltage issues to select. If None, `percentage` is used. Default: None. percentage : float - In case `timesteps_pfa` is set to 'reduced_analysis', this parameter can be + In case `reduced_analysis` is set to True, this parameter can be used to specify the percentage of most critical time steps to select. The default is 1.0, in which case all most critical time steps are selected. Default: 1.0. use_troubleshooting_mode : bool - In case `timesteps_pfa` is set to 'reduced_analysis', this parameter can be - used to specify how to handle non-convergence issues in the power flow - analysis. If set to True, non-convergence issues are tried to be + In case `reduced_analysis` is set to True, this parameter can be used to + specify how to handle non-convergence issues when determining the most + critical time steps. If set to True, non-convergence issues are tried to be circumvented by reducing load and feed-in until the power flow converges. The most critical time steps are then determined based on the power flow results with the reduced load and feed-in. If False, an error will be - raised in case time steps do not converge. Default: True. + raised in case time steps do not converge. + Setting this to True doesn't make sense for the grid reinforcement as the + troubleshooting mode is only used when determining the most critical time + steps not when running a power flow analysis to determine grid reinforcement + needs. 
To handle non-convergence in the grid reinforcement set parameter + `catch_convergence_problems` to True. + Default: False. + run_initial_analyze : bool + In case `reduced_analysis` is set to True, this parameter can be + used to specify whether to run an initial analyze to determine most + critical time steps or to use existing results. If set to False, + `use_troubleshooting_mode` is ignored. Default: True. + weight_by_costs : bool + In case `reduced_analysis` is set to True, this parameter can be used + to specify whether to weight time steps by estimated grid expansion costs. + See parameter `weight_by_costs` in + :func:`~.tools.temporal_complexity_reduction.get_most_critical_time_steps` + for more information. Default: False. Returns -------- @@ -1408,6 +1427,7 @@ def reinforce( func( edisgo_obj, + reduced_analysis=reduced_analysis, max_while_iterations=max_while_iterations, split_voltage_band=split_voltage_band, without_generator_import=without_generator_import, @@ -1909,7 +1929,7 @@ def import_electromobility( Imports electromobility data and integrates charging points into grid. Electromobility data can be obtained from the `OpenEnergy DataBase - `_ or from self-provided + `_ or from self-provided data. In case you want to use self-provided data, it needs to be generated using the tools `SimBEV `_ (required version: @@ -1941,7 +1961,7 @@ def import_electromobility( * "oedb" Electromobility data is obtained from the `OpenEnergy DataBase - `_. + `_. This option requires that the parameters `scenario` and `engine` are provided. @@ -2124,7 +2144,7 @@ def import_heat_pumps(self, scenario, engine, timeindex=None, import_types=None) between two scenarios: 'eGon2035' and 'eGon100RE'. The data is retrieved from the - `open energy platform `_. + `open energy platform `_. # ToDo Add information on scenarios and from which tables data is retrieved. 
@@ -2286,7 +2306,7 @@ def apply_heat_pump_operating_strategy( def import_dsm(self, scenario: str, engine: Engine, timeindex=None): """ Gets industrial and CTS DSM profiles from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Profiles comprise minimum and maximum load increase in MW as well as maximum energy pre- and postponing in MWh. The data is written to the @@ -2337,7 +2357,7 @@ def import_home_batteries( between two scenarios: 'eGon2035' and 'eGon100RE'. The data is retrieved from the - `open energy platform `_. + `open energy platform `_. The batteries are integrated into the grid (added to :attr:`~.network.topology.Topology.storage_units_df`) based on their building diff --git a/edisgo/flex_opt/check_tech_constraints.py b/edisgo/flex_opt/check_tech_constraints.py index 37b5c2ba..e676d615 100644 --- a/edisgo/flex_opt/check_tech_constraints.py +++ b/edisgo/flex_opt/check_tech_constraints.py @@ -764,7 +764,7 @@ def stations_relative_load(edisgo_obj, grids=None, n_minus_one=False): except Exception: pass - return loading / allowed_loading.loc[:, loading.columns] + return loading / allowed_loading.loc[loading.index, loading.columns] def components_relative_load(edisgo_obj, n_minus_one=False): @@ -1246,6 +1246,10 @@ def voltage_deviation_from_allowed_voltage_limits( v_dev_allowed_upper, v_dev_allowed_lower = allowed_voltage_limits( edisgo_obj, buses=buses, split_voltage_band=split_voltage_band ) + # the following is needed in case the power flow was only conducted for one LV + # grid - voltage at station node cannot be checked, warning is already raised + # in allowed_voltage_limits() + buses = v_dev_allowed_upper.columns # get voltages from power flow analysis v_mag_pu_pfa = edisgo_obj.results.v_res.loc[:, buses] diff --git a/edisgo/flex_opt/costs.py b/edisgo/flex_opt/costs.py index 8bd63d9b..7cbe359b 100644 --- a/edisgo/flex_opt/costs.py +++ b/edisgo/flex_opt/costs.py @@ -1,3 +1,4 @@ +import logging import os import pandas as pd @@ -7,6 +8,8 @@ from 
edisgo.tools.geo import proj2equidistant +logger = logging.getLogger(__name__) + def grid_expansion_costs(edisgo_obj, without_generator_import=False): """ @@ -67,7 +70,8 @@ def _get_transformer_costs(trafos): costs_trafos = pd.DataFrame( { "costs_transformers": len(hvmv_trafos) - * [float(edisgo_obj.config["costs_transformers"]["mv"])] + * [float(edisgo_obj.config["costs_transformers"]["mv"])], + "voltage_level": len(hvmv_trafos) * ["hv/mv"], }, index=hvmv_trafos, ) @@ -77,13 +81,14 @@ def _get_transformer_costs(trafos): pd.DataFrame( { "costs_transformers": len(mvlv_trafos) - * [float(edisgo_obj.config["costs_transformers"]["lv"])] + * [float(edisgo_obj.config["costs_transformers"]["lv"])], + "voltage_level": len(mvlv_trafos) * ["mv/lv"], }, index=mvlv_trafos, ), ] ) - return costs_trafos.loc[trafos.index, "costs_transformers"].values + return costs_trafos.loc[trafos.index, :] def _get_line_costs(lines_added): costs_lines = line_expansion_costs(edisgo_obj, lines_added.index) @@ -107,9 +112,8 @@ def _get_line_costs(lines_added): # costs for transformers if not equipment_changes.empty: transformers = equipment_changes[ - equipment_changes.index.isin( - [f"{_}_station" for _ in edisgo_obj.topology._grids_repr] - ) + equipment_changes.equipment.str.contains("Transformer") + | equipment_changes.equipment.str.contains("transformer") ] added_transformers = transformers[transformers["change"] == "added"] removed_transformers = transformers[transformers["change"] == "removed"] @@ -129,15 +133,16 @@ def _get_line_costs(lines_added): ) trafos = all_trafos.loc[added_transformers["equipment"]] # calculate costs for each transformer + transformer_costs = _get_transformer_costs(trafos) costs = pd.concat( [ costs, pd.DataFrame( { "type": trafos.type_info.values, - "total_costs": _get_transformer_costs(trafos), + "total_costs": transformer_costs.costs_transformers, "quantity": len(trafos) * [1], - "voltage_level": len(trafos) * ["mv/lv"], + "voltage_level": 
transformer_costs.voltage_level, }, index=trafos.index, ), @@ -161,6 +166,19 @@ def _get_line_costs(lines_added): .sum() .loc[lines_added_unique, ["quantity"]] ) + # use the minimum of quantity and num_parallel, as sometimes lines are added + # and in a next reinforcement step removed again, e.g. when feeder is split + # at 2/3 and a new single standard line is added + lines_added = pd.merge( + lines_added, + edisgo_obj.topology.lines_df.loc[:, ["num_parallel"]], + how="left", + left_index=True, + right_index=True, + ) + lines_added["quantity_added"] = lines_added.loc[ + :, ["quantity", "num_parallel"] + ].min(axis=1) lines_added["length"] = edisgo_obj.topology.lines_df.loc[ lines_added.index, "length" ] @@ -176,9 +194,9 @@ def _get_line_costs(lines_added): ].values, "total_costs": line_costs.costs.values, "length": ( - lines_added.quantity * lines_added.length + lines_added.quantity_added * lines_added.length ).values, - "quantity": lines_added.quantity.values, + "quantity": lines_added.quantity_added.values, "voltage_level": line_costs.voltage_level.values, }, index=lines_added.index, @@ -288,3 +306,69 @@ def line_expansion_costs(edisgo_obj, lines_names=None): ] ) return costs_lines.loc[lines_df.index] + + +def transformer_expansion_costs(edisgo_obj, transformer_names=None): + """ + Returns costs per transformer in kEUR as well as voltage level they are in. + + Parameters + ----------- + edisgo_obj : :class:`~.EDisGo` + eDisGo object + transformer_names: None or list(str) + List of names of transformers to return cost information for. If None, it is + returned for all transformers in + :attr:`~.network.topology.Topology.transformers_df` and + :attr:`~.network.topology.Topology.transformers_hvmv_df`. + + Returns + ------- + costs: :pandas:`pandas.DataFrame` + Dataframe with names of transformers in index and columns 'costs' with + costs per transformer in kEUR and 'voltage_level' with information on voltage + level the transformer is in. 
+ + """ + transformers_df = pd.concat( + [ + edisgo_obj.topology.transformers_df.copy(), + edisgo_obj.topology.transformers_hvmv_df.copy(), + ] + ) + if transformer_names is not None: + transformers_df = transformers_df.loc[transformer_names, ["type_info"]] + + if len(transformers_df) == 0: + return pd.DataFrame(columns=["costs", "voltage_level"]) + + hvmv_transformers = transformers_df[ + transformers_df.index.isin(edisgo_obj.topology.transformers_hvmv_df.index) + ].index + mvlv_transformers = transformers_df[ + transformers_df.index.isin(edisgo_obj.topology.transformers_df.index) + ].index + + costs_hvmv = float(edisgo_obj.config["costs_transformers"]["mv"]) + costs_mvlv = float(edisgo_obj.config["costs_transformers"]["lv"]) + + costs_df = pd.DataFrame( + { + "costs": costs_hvmv, + "voltage_level": "hv/mv", + }, + index=hvmv_transformers, + ) + costs_df = pd.concat( + [ + costs_df, + pd.DataFrame( + { + "costs": costs_mvlv, + "voltage_level": "mv/lv", + }, + index=mvlv_transformers, + ), + ] + ) + return costs_df diff --git a/edisgo/flex_opt/curtailment.py b/edisgo/flex_opt/curtailment.py deleted file mode 100644 index 484df444..00000000 --- a/edisgo/flex_opt/curtailment.py +++ /dev/null @@ -1,782 +0,0 @@ -import logging - -import pandas as pd - -from pyomo.environ import ( - ConcreteModel, - Constraint, - Objective, - Param, - Set, - Var, - minimize, -) -from pyomo.opt import SolverFactory - -from edisgo.io import pypsa_io - - -def voltage_based( - feedin, generators, curtailment_timeseries, edisgo, curtailment_key, **kwargs -): - """ - Implements curtailment methodology 'voltage-based'. - - ToDo: adapt to refactored code! - - The curtailment that has to be met in each time step is allocated depending - on the exceedance of the allowed voltage deviation at the nodes of the - generators. The higher the exceedance, the higher the curtailment. 
- - The optional parameter `voltage_threshold` specifies the threshold for the - exceedance of the allowed voltage deviation above which a generator is - curtailed. By default it is set to zero, meaning that all generators at - nodes with voltage deviations that exceed the allowed voltage deviation are - curtailed. Generators at nodes where the allowed voltage deviation is not - exceeded are not curtailed. In the case that the required curtailment - exceeds the weather-dependent availability of all generators with voltage - deviations above the specified threshold, the voltage threshold is lowered - in steps of 0.01 p.u. until the curtailment target can be met. - - Above the threshold, the curtailment is proportional to the exceedance of - the allowed voltage deviation. In order to find the linear relation between - the curtailment and the voltage difference a linear problem is formulated - and solved using the python package pyomo. See documentation for further - information. - - Parameters - ---------- - feedin : :pandas:`pandas.DataFrame` - Dataframe holding the feed-in of each generator in kW for the - technology (and weather cell) specified in `curtailment_key` parameter. - Index of the dataframe is a - :pandas:`pandas.DatetimeIndex`. Columns are the - representatives of the fluctuating generators. - generators : :pandas:`pandas.DataFrame` - Dataframe with all generators of the type (and in weather cell) - specified in `curtailment_key` parameter. See return value of - :func:`edisgo.network.tools.get_gen_info` for more information. - curtailment_timeseries : :pandas:`pandas.Series` - The curtailment in kW to be distributed amongst the generators in - `generators` parameter. Index of the series is a - :pandas:`pandas.DatetimeIndex`. - edisgo : :class:`~.edisgo.EDisGo` - curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str` - The technology and weather cell ID if :obj:`tuple` or only - the technology if :obj:`str` the curtailment is specified for. 
- voltage_threshold: :obj:`float` - The node voltage below which no curtailment is assigned to the - respective generator if not necessary. Default: 0.0. - solver: :obj:`str` - The solver used to optimize the curtailment assigned to the generator. - Possible options are: - - * 'cbc' - coin-or branch and cut solver - * 'glpk' - gnu linear programming kit solver - * any other available compatible with 'pyomo' like 'gurobi' - or 'cplex' - - Default: 'cbc' - - """ - - raise NotImplementedError - - voltage_threshold = pd.Series( - kwargs.get("voltage_threshold", 0.0), - index=curtailment_timeseries.index, - ) - solver = kwargs.get("solver", "cbc") - combined_analysis = kwargs.get("combined_analysis", False) - - # get the voltages at the generators - if not edisgo.network.pypsa.edisgo_mode: - voltages_lv_gens = edisgo.network.results.v_res( - nodes_df=generators.loc[(generators.voltage_level == "lv")].index, - level="lv", - ) - else: - # if only MV topology was analyzed (edisgo_mode = 'mv') all LV - # generators are assigned the voltage at the corresponding station's - # primary side - lv_gens = generators[generators.voltage_level == "lv"] - voltages_lv_stations = edisgo.network.results.v_res( - nodes_df=[_.station for _ in lv_gens.grid.unique()], level="mv" - ) - voltages_lv_gens = pd.DataFrame() - for lv_gen in lv_gens.index: - voltages_lv_gens[repr(lv_gen)] = voltages_lv_stations[ - repr(lv_gen.grid.station) - ] - voltages_mv_gens = edisgo.network.results.v_res( - nodes_df=generators.loc[(generators.voltage_level == "mv")].index, - level="mv", - ) - voltages_gens = voltages_lv_gens.join(voltages_mv_gens) - - # get allowed voltage deviations from config - if not combined_analysis: - allowed_voltage_dev_mv = edisgo.network.config[ - "grid_expansion_allowed_voltage_deviations" - ]["mv_feed-in_case_max_v_deviation"] - allowed_voltage_diff_lv = edisgo.network.config[ - "grid_expansion_allowed_voltage_deviations" - ]["lv_feed-in_case_max_v_deviation"] - else: - 
allowed_voltage_dev_mv = edisgo.network.config[ - "grid_expansion_allowed_voltage_deviations" - ]["mv_lv_feed-in_case_max_v_deviation"] - allowed_voltage_diff_lv = edisgo.network.config[ - "grid_expansion_allowed_voltage_deviations" - ]["mv_lv_feed-in_case_max_v_deviation"] - - # assign allowed voltage deviation to each generator - if not edisgo.network.pypsa.edisgo_mode: - # for edisgo_mode = None - - # get voltages at stations - grids = list(set(generators.grid)) - lv_stations = [_.station for _ in grids if "LVStation" in repr(_.station)] - voltage_lv_stations = edisgo.network.results.v_res( - nodes_df=lv_stations, level="lv" - ) - voltages_mv_station = edisgo.network.results.v_res( - nodes_df=[edisgo.network.mv_grid.station], level="mv" - ) - voltages_stations = voltage_lv_stations.join(voltages_mv_station) - - # assign allowed voltage deviation - generators["allowed_voltage_dev"] = generators.voltage_level.apply( - lambda _: allowed_voltage_diff_lv if _ == "lv" else allowed_voltage_dev_mv - ) - - # calculate voltage difference from generator node to station - voltage_gens_diff = pd.DataFrame() - for gen in voltages_gens.columns: - station = generators[generators.gen_repr == gen].grid.values[0].station - voltage_gens_diff[gen] = ( - voltages_gens.loc[:, gen] - - voltages_stations.loc[:, repr(station)] - - generators[generators.gen_repr == gen].allowed_voltage_dev.iloc[0] - ) - - else: - # for edisgo_mode = 'mv' - - station = edisgo.network.mv_grid.station - # get voltages at HV/MV station - voltages_station = edisgo.network.results.v_res(nodes_df=[station], level="mv") - - # assign allowed voltage deviation - generators["allowed_voltage_dev"] = allowed_voltage_dev_mv - - # calculate voltage difference from generator node to station - voltage_gens_diff = pd.DataFrame() - for gen in voltages_gens.columns: - voltage_gens_diff[gen] = ( - voltages_gens.loc[:, gen] - - voltages_station.loc[:, repr(station)] - - generators[generators.gen_repr == 
gen].allowed_voltage_dev.iloc[0] - ) - - # for every time step check if curtailment can be fulfilled, otherwise - # reduce voltage threshold; set feed-in of generators below voltage - # threshold to zero, so that they cannot be curtailed - for ts in curtailment_timeseries.index: - # get generators with voltage higher than threshold - gen_pool = voltage_gens_diff.loc[ - ts, voltage_gens_diff.loc[ts, :] > voltage_threshold.loc[ts] - ].index - # if curtailment cannot be fulfilled lower voltage threshold - while sum(feedin.loc[ts, gen_pool]) < curtailment_timeseries.loc[ts]: - voltage_threshold.loc[ts] = voltage_threshold.loc[ts] - 0.01 - gen_pool = voltage_gens_diff.loc[ - ts, voltage_gens_diff.loc[ts, :] > voltage_threshold.loc[ts] - ].index - # set feed-in of generators below voltage threshold to zero, so that - # they cannot be curtailed - gen_pool_out = voltage_gens_diff.loc[ - ts, voltage_gens_diff.loc[ts, :] <= voltage_threshold.loc[ts] - ].index - feedin.loc[ts, gen_pool_out] = 0 - - # only optimize for time steps where curtailment is greater than zero - timeindex = curtailment_timeseries[curtailment_timeseries > 0].index - if not timeindex.empty: - curtailment = _optimize_voltage_based_curtailment( - feedin, - voltage_gens_diff, - curtailment_timeseries, - voltage_threshold, - timeindex, - solver, - ) - else: - curtailment = pd.DataFrame() - - # set curtailment for other time steps to zero - curtailment = pd.concat( - [ - curtailment, - pd.DataFrame( - 0, - columns=feedin.columns, - index=curtailment_timeseries[curtailment_timeseries <= 0].index, - ), - ] - ) - - # check if curtailment target was met - _check_curtailment_target(curtailment, curtailment_timeseries, curtailment_key) - - # assign curtailment to individual generators - _assign_curtailment(curtailment, edisgo, generators, curtailment_key) - - -def _optimize_voltage_based_curtailment( - feedin, voltage_pu, total_curtailment, voltage_threshold, timeindex, solver -): - """ - Formulates and solves 
linear problem to find linear relation between - curtailment and node voltage. - - ToDo: adapt to refactored code! - - Parameters - ------------ - feedin : :pandas:`pandas.DataFrame` - See `feedin` parameter in - :func:`edisgo.flex_opt.curtailment.voltage_based` for more information. - voltage_pu : :pandas:`pandas.DataFrame - Dataframe containing voltages in p.u. at the generator nodes. Index - of the dataframe is a :pandas:`pandas.DatetimeIndex`, - columns are the generator representatives. - total_curtailment : :pandas:`pandas.Series` - Series containing the specific curtailment in kW to be allocated to the - generators. The index is a - :pandas:`pandas.DatetimeIndex`. - voltage_threshold : :pandas:`pandas.Series` - Series containing the voltage thresholds in p.u. below which no - generator curtailment will occur. The index is a - :pandas:`pandas.DatetimeIndex`. - solver : :obj:`str` - The solver used to optimize the linear problem. Default: 'cbc'. - - Returns - ------- - :pandas:`pandas.DataFrame` - Dataframe containing the curtailment in kW per generator and time step - feed-in was provided for in `feedin` parameter. Index is a - :pandas:`pandas.DatetimeIndex`, columns are the - generator representatives. 
- - """ - - raise NotImplementedError - - logging.debug("Start curtailment optimization.") - - v_max = voltage_pu.max(axis=1) - generators = feedin.columns - - # additional curtailment factors - cf_add = pd.DataFrame(index=timeindex) - for gen in generators: - cf_add[gen] = abs( - (voltage_pu.loc[timeindex, gen] - v_max[timeindex]) - / (voltage_threshold[timeindex] - v_max[timeindex]) - ) - - # curtailment factors - cf = pd.DataFrame(index=timeindex) - for gen in generators: - cf[gen] = abs( - (voltage_pu.loc[timeindex, gen] - voltage_threshold[timeindex]) - / (v_max[timeindex] - voltage_threshold[timeindex]) - ) - - # initialize model - model = ConcreteModel() - - # add sets - model.T = Set(initialize=timeindex) - model.G = Set(initialize=generators) - - # add parameters - def feedin_init(model, t, g): - return feedin.loc[t, g] - - model.feedin = Param(model.T, model.G, initialize=feedin_init) - - def voltage_pu_init(model, t, g): - return voltage_pu.loc[t, g] - - model.voltage_pu = Param(model.T, model.G, initialize=voltage_pu_init) - - def cf_add_init(model, t, g): - return cf_add.loc[t, g] - - model.cf_add = Param(model.T, model.G, initialize=cf_add_init) - - def cf_init(model, t, g): - return cf.loc[t, g] - - model.cf = Param(model.T, model.G, initialize=cf_init) - - def total_curtailment_init(model, t): - return total_curtailment.loc[t] - - model.total_curtailment = Param(model.T, initialize=total_curtailment_init) - - # add variables - model.offset = Var(model.T, bounds=(0, 1)) - model.cf_max = Var(model.T, bounds=(0, 1)) - - def curtailment_init(model, t, g): - return (0, feedin.loc[t, g]) - - model.c = Var(model.T, model.G, bounds=curtailment_init) - - # add objective - def obj_rule(model): - expr = sum(model.offset[t] * 100 for t in model.T) - return expr - - model.obj = Objective(rule=obj_rule, sense=minimize) - - # add constraints - # curtailment per generator constraints - def curtail(model, t, g): - return ( - model.cf[t, g] * model.cf_max[t] * 
model.feedin[t, g] - + model.cf_add[t, g] * model.offset[t] * model.feedin[t, g] - - model.c[t, g] - == 0 - ) - - model.curtailment = Constraint(model.T, model.G, rule=curtail) - - # total curtailment constraint - def total_curtailment(model, t): - return sum(model.c[t, g] for g in model.G) == model.total_curtailment[t] - - model.sum_curtailment = Constraint(model.T, rule=total_curtailment) - - # solve - solver = SolverFactory(solver) - results = solver.solve(model, tee=False) - - # load results back into model - model.solutions.load_from(results) - - return pd.DataFrame( - {g: [model.c[t, g].value for t in model.T] for g in model.G}, - index=model.T, - ) - - -def feedin_proportional( - feedin, generators, curtailment_timeseries, edisgo, curtailment_key, **kwargs -): - """ - Implements curtailment methodology 'feedin-proportional'. - - ToDo: adapt to refactored code! - - The curtailment that has to be met in each time step is allocated - equally to all generators depending on their share of total - feed-in in that time step. - - Parameters - ---------- - feedin : :pandas:`pandas.DataFrame` - Dataframe holding the feed-in of each generator in kW for the - technology (and weather cell) specified in `curtailment_key` parameter. - Index of the dataframe is a - :pandas:`pandas.DatetimeIndex`. Columns are the - representatives of the fluctuating generators. - generators : :pandas:`pandas.DataFrame` - Dataframe with all generators of the type (and in weather cell) - specified in `curtailment_key` parameter. See return value of - :func:`edisgo.network.tools.get_gen_info` for more information. - curtailment_timeseries : :pandas:`pandas.Series` - The curtailment in kW to be distributed amongst the generators in - `generators` parameter. Index of the series is a - :pandas:`pandas.DatetimeIndex`. 
- edisgo : :class:`~.edisgo.EDisGo` - curtailment_key::obj:`str` or :obj:`tuple` with :obj:`str` - The technology and weather cell ID if :obj:`tuple` or only - the technology if :obj:`str` the curtailment is specified for. - - """ - raise NotImplementedError - - # calculate curtailment in each time step of each generator - curtailment = feedin.divide(feedin.sum(axis=1), axis=0).multiply( - curtailment_timeseries, axis=0 - ) - - # substitute NaNs from division with 0 by 0 - curtailment.fillna(0, inplace=True) - - # check if curtailment target was met - _check_curtailment_target(curtailment, curtailment_timeseries, curtailment_key) - - # assign curtailment to individual generators - _assign_curtailment(curtailment, edisgo, generators, curtailment_key) - - -def _check_curtailment_target(curtailment, curtailment_target, curtailment_key): - """ - Raises an error if curtailment target was not met in any time step. - - ToDo: adapt to refactored code! - - Parameters - ----------- - curtailment : :pandas:`pandas.DataFrame` - Dataframe containing the curtailment in kW per generator and time step. - Index is a :pandas:`pandas.DatetimeIndex`, columns are - the generator representatives. - curtailment_target : :pandas:`pandas.Series` - The curtailment in kW that was to be distributed amongst the - generators. Index of the series is a - :pandas:`pandas.DatetimeIndex`. - curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str` - The technology and weather cell ID if :obj:`tuple` or only - the technology if :obj:`str` the curtailment was specified for. - - """ - raise NotImplementedError - - if not (abs(curtailment.sum(axis=1) - curtailment_target) < 1e-1).all(): - message = "Curtailment target not met for {}.".format(curtailment_key) - logging.error(message) - raise TypeError(message) - - -def _assign_curtailment(curtailment, edisgo, generators, curtailment_key): - """ - Helper function to write curtailment time series to generator objects. - - ToDo: adapt to refactored code! 
- - This function also writes a list of the curtailed generators to curtailment - in :class:`edisgo.network.network.TimeSeries` and - :class:`edisgo.network.network.Results`. - - Parameters - ---------- - curtailment : :pandas:`pandas.DataFrame` - Dataframe containing the curtailment in kW per generator and time step - for all generators of the type (and in weather cell) specified in - `curtailment_key` parameter. Index is a - :pandas:`pandas.DatetimeIndex`, columns are the - generator representatives. - edisgo : :class:`~.edisgo.EDisGo` - generators : :pandas:`pandas.DataFrame` - Dataframe with all generators of the type (and in weather cell) - specified in `curtailment_key` parameter. See return value of - :func:`edisgo.network.tools.get_gen_info` for more information. - curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str` - The technology and weather cell ID if :obj:`tuple` or only - the technology if :obj:`str` the curtailment is specified for. - - """ - raise NotImplementedError - - gen_object_list = [] - for gen in curtailment.columns: - # get generator object from representative - gen_object = generators.loc[generators.gen_repr == gen].index[0] - # assign curtailment to individual generators - gen_object.curtailment = curtailment.loc[:, gen] - gen_object_list.append(gen_object) - - # set timeseries.curtailment - if edisgo.network.timeseries._curtailment: - edisgo.network.timeseries._curtailment.extend(gen_object_list) - edisgo.network.results._curtailment[curtailment_key] = gen_object_list - else: - edisgo.network.timeseries._curtailment = gen_object_list - # list needs to be copied, otherwise it will be extended every time - # a new key is added to results._curtailment - edisgo.network.results._curtailment = {curtailment_key: gen_object_list.copy()} - - -class CurtailmentControl: - """ - Allocates given curtailment targets to solar and wind generators. - - ToDo: adapt to refactored code! 
- - Parameters - ---------- - edisgo: :class:`edisgo.EDisGo` - The parent EDisGo object that this instance is a part of. - methodology : :obj:`str` - Defines the curtailment strategy. Possible options are: - - * 'feedin-proportional' - The curtailment that has to be met in each time step is allocated - equally to all generators depending on their share of total - feed-in in that time step. For more information see - :func:`edisgo.flex_opt.curtailment.feedin_proportional`. - * 'voltage-based' - The curtailment that has to be met in each time step is allocated - based on the voltages at the generator connection points and a - defined voltage threshold. Generators at higher voltages - are curtailed more. The default voltage threshold is 1.0 but - can be changed by providing the argument 'voltage_threshold'. This - method formulates the allocation of curtailment as a linear - optimization problem using :py:mod:`Pyomo` and requires a linear - programming solver like coin-or cbc (cbc) or gnu linear programming - kit (glpk). The solver can be specified through the parameter - 'solver'. For more information see - :func:`edisgo.flex_opt.curtailment.voltage_based`. - - curtailment_timeseries : :pandas:`pandas.Series` or \ - :pandas:`pandas.DataFrame`, optional - Series or DataFrame containing the curtailment time series in kW. Index - needs to be a :pandas:`pandas.DatetimeIndex`. - Provide a Series if the curtailment time series applies to wind and - solar generators. Provide a DataFrame if the curtailment time series - applies to a specific technology and optionally weather cell. In the - first case columns of the DataFrame are e.g. 'solar' and 'wind'; in the - second case columns need to be a - :pandas:`pandas.MultiIndex` with the first level containing - the type and the second level the weather cell ID. Default: None. - solver: :obj:`str` - The solver used to optimize the curtailment assigned to the generators - when 'voltage-based' curtailment methodology is chosen. 
- Possible options are: - - * 'cbc' - * 'glpk' - * any other available solver compatible with 'pyomo' such as 'gurobi' - or 'cplex' - - Default: 'cbc'. - voltage_threshold : :obj:`float` - Voltage below which no curtailment is assigned to the respective - generator if not necessary when 'voltage-based' curtailment methodology - is chosen. See :func:`edisgo.flex_opt.curtailment.voltage_based` for - more information. Default: 1.0. - mode : :obj:`str` - The `mode` is only relevant for curtailment method 'voltage-based'. - Possible options are None and 'mv'. Per default `mode` is None in which - case a power flow is conducted for both the MV and LV. In case `mode` - is set to 'mv' components in underlying LV grids are considered - aggregative. Default: None. - - """ - - # ToDo move some properties from topology here (e.g. peak_load, generators,...) - def __init__( - self, edisgo, methodology, curtailment_timeseries, mode=None, **kwargs - ): - raise NotImplementedError - - logging.info("Start curtailment methodology {}.".format(methodology)) - - self._check_timeindex(curtailment_timeseries, edisgo.topology) - - if methodology == "feedin-proportional": - curtailment_method = feedin_proportional - elif methodology == "voltage-based": - curtailment_method = voltage_based - else: - raise ValueError( - "{} is not a valid curtailment methodology.".format(methodology) - ) - - # check if provided mode is valid - if mode and mode != "mv": - raise ValueError("Provided mode {} is not a valid mode.") - - # get all fluctuating generators and their attributes (weather ID, - # type, etc.) 
- # TODO: Function get_gen_info does not exist - generators = get_gen_info( # noqa: F821 - edisgo.topology, "mvlv", fluctuating=True - ) - - # do analyze to get all voltages at generators and feed-in dataframe - edisgo.analyze(mode=mode) - - # get feed-in time series of all generators - if not mode: - feedin = edisgo.topology.pypsa.generators_t.p * 1000 - # drop dispatchable generators and slack generator - drop_labels = [ - _ for _ in feedin.columns if "GeneratorFluctuating" not in _ - ] + ["Generator_slack"] - else: - feedin = edisgo.topology.mv_grid.generators_timeseries() - for grid in edisgo.topology.mv_grid.lv_grids: - feedin = pd.concat([feedin, grid.generators_timeseries()], axis=1) - feedin.rename(columns=lambda _: repr(_), inplace=True) - # drop dispatchable generators - drop_labels = [_ for _ in feedin.columns if "GeneratorFluctuating" not in _] - feedin.drop(labels=drop_labels, axis=1, inplace=True) - - if isinstance(curtailment_timeseries, pd.Series): - # check if curtailment exceeds feed-in - self._precheck(curtailment_timeseries, feedin, "all_fluctuating_generators") - - # do curtailment - curtailment_method( - feedin, - generators, - curtailment_timeseries, - edisgo, - "all_fluctuating_generators", - **kwargs - ) - - elif isinstance(curtailment_timeseries, pd.DataFrame): - for col in curtailment_timeseries.columns: - logging.debug("Calculating curtailment for {}".format(col)) - - # filter generators - if isinstance(curtailment_timeseries.columns, pd.MultiIndex): - selected_generators = generators.loc[ - (generators.type == col[0]) - & (generators.weather_cell_id == col[1]) - ] - else: - selected_generators = generators.loc[(generators.type == col)] - - # check if curtailment exceeds feed-in - feedin_selected_generators = feedin.loc[ - :, selected_generators.gen_repr.values - ] - self._precheck( - curtailment_timeseries.loc[:, col], - feedin_selected_generators, - col, - ) - - # do curtailment - if not feedin_selected_generators.empty: - 
curtailment_method( - feedin_selected_generators, - selected_generators, - curtailment_timeseries.loc[:, col], - edisgo, - col, - **kwargs - ) - - # check if curtailment exceeds feed-in - self._postcheck(edisgo.topology, feedin) - - # update generator time series in pypsa topology - if edisgo.topology.pypsa is not None: - pypsa_io.update_pypsa_generator_timeseries(edisgo.topology) - - # add measure to Results object - edisgo.results.measures = "curtailment" - - def _check_timeindex(self, curtailment_timeseries, network): - """ - Raises an error if time index of curtailment time series does not - comply with the time index of load and feed-in time series. - - Parameters - ----------- - curtailment_timeseries : :pandas:`pandas.Series` or \ - :pandas:`pandas.DataFrame` - See parameter `curtailment_timeseries` in class definition for more - information. - - """ - raise NotImplementedError - - if curtailment_timeseries is None: - message = "No curtailment given." - logging.error(message) - raise KeyError(message) - try: - curtailment_timeseries.loc[network.timeseries.timeindex] - except Exception: - message = ( - "Time index of curtailment time series does not match " - "with load and feed-in time series." - ) - logging.error(message) - raise KeyError(message) - - def _precheck(self, curtailment_timeseries, feedin_df, curtailment_key): - """ - Raises an error if the curtailment at any time step exceeds the - total feed-in of all generators curtailment can be distributed among - at that time. - - Parameters - ----------- - curtailment_timeseries : :pandas:`pandas.Series` - Curtailment time series in kW for the technology (and weather - cell) specified in `curtailment_key`. - feedin_df : :pandas:`pandas.Series` - Feed-in time series in kW for all generators of type (and in - weather cell) specified in `curtailment_key`. - curtailment_key : :obj:`str` or :obj:`tuple` with :obj:`str` - Technology (and weather cell) curtailment is given for. 
- - """ - raise NotImplementedError - - if not feedin_df.empty: - feedin_selected_sum = feedin_df.sum(axis=1) - diff = feedin_selected_sum - curtailment_timeseries - # add tolerance (set small negative values to zero) - diff[diff.between(-1, 0)] = 0 - if not (diff >= 0).all(): - bad_time_steps = [_ for _ in diff.index if diff[_] < 0] - message = ( - "Curtailment demand exceeds total feed-in in time " - "steps {}.".format(bad_time_steps) - ) - logging.error(message) - raise ValueError(message) - else: - bad_time_steps = [ - _ for _ in curtailment_timeseries.index if curtailment_timeseries[_] > 0 - ] - if bad_time_steps: - message = ( - "Curtailment given for time steps {} but there " - "are no generators to meet the curtailment target " - "for {}.".format(bad_time_steps, curtailment_key) - ) - logging.error(message) - raise ValueError(message) - - def _postcheck(self, network, feedin): - """ - Raises an error if the curtailment of a generator exceeds the - feed-in of that generator at any time step. - - Parameters - ----------- - network : :class:`~.network.topology.Topology` - feedin : :pandas:`pandas.DataFrame` - DataFrame with feed-in time series in kW. Columns of the dataframe - are :class:`~.network.components.GeneratorFluctuating`, index is - time index. - - """ - raise NotImplementedError - - curtailment = network.timeseries.curtailment - gen_repr = [repr(_) for _ in curtailment.columns] - feedin_repr = feedin.loc[:, gen_repr] - curtailment_repr = curtailment - curtailment_repr.columns = gen_repr - if not ((feedin_repr - curtailment_repr) > -1e-1).all().all(): - message = "Curtailment exceeds feed-in." 
- logging.error(message) - raise TypeError(message) diff --git a/edisgo/flex_opt/q_control.py b/edisgo/flex_opt/q_control.py index a6e98578..07183a7d 100644 --- a/edisgo/flex_opt/q_control.py +++ b/edisgo/flex_opt/q_control.py @@ -92,22 +92,6 @@ def fixed_cosphi(active_power, q_sign, power_factor): return active_power * q_sign * np.tan(np.arccos(power_factor)) -def _get_component_dict(): - """ - Helper function to translate from component type term used in function to the one - used in the config files. - - """ - comp_dict = { - "generators": "gen", - "storage_units": "storage", - "conventional_loads": "load", - "charging_points": "cp", - "heat_pumps": "hp", - } - return comp_dict - - def _fixed_cosphi_default_power_factor(comp_df, component_type, configs): """ Gets fixed cosphi default reactive power factor for each given component. @@ -116,15 +100,15 @@ def _fixed_cosphi_default_power_factor(comp_df, component_type, configs): ----------- comp_df : :pandas:`pandas.DataFrame` Dataframe with component names (in the index) of all components - reactive power factor needs to be set. Only required column is + reactive power factor needs to be set for. Only required column is column 'voltage_level', giving the voltage level the component is in (the voltage level can be set using the function :func:`~.tools.tools.assign_voltage_level_to_component`). All components must have the same `component_type`. component_type : str The component type determines the reactive power factor and mode used. - Possible options are 'generators', 'storage_units', 'conventional_loads', - 'charging_points', and 'heat_pumps'. + Possible options are 'generator', 'storage_unit', 'conventional_load', + 'charging_point', and 'heat_pump'. configs : :class:`~.tools.config.Config` eDisGo configuration data. 
@@ -136,22 +120,28 @@ def _fixed_cosphi_default_power_factor(comp_df, component_type, configs): """ reactive_power_factor = configs["reactive_power_factor"] - comp_dict = _get_component_dict() - - if component_type in comp_dict.keys(): - comp = comp_dict[component_type] + allowed_types = [ + "generator", + "storage_unit", + "conventional_load", + "charging_point", + "heat_pump", + ] + if component_type in allowed_types: # write series with power factor for each component power_factor = pd.Series(index=comp_df.index, dtype=float) for voltage_level in comp_df.voltage_level.unique(): cols = comp_df.index[comp_df.voltage_level == voltage_level] if len(cols) > 0: - power_factor[cols] = reactive_power_factor[f"{voltage_level}_{comp}"] + power_factor[cols] = reactive_power_factor[ + f"{voltage_level}_{component_type}" + ] return power_factor else: raise ValueError( "Given 'component_type' is not valid. Valid options are " - "'generators','storage_units', 'conventional_loads', 'charging_points', " - "and 'heat_pumps'." + "'generator', 'storage_unit', 'conventional_load', 'charging_point', " + "and 'heat_pump'." ) @@ -170,8 +160,8 @@ def _fixed_cosphi_default_reactive_power_sign(comp_df, component_type, configs): All components must have the same `component_type`. component_type : str The component type determines the reactive power factor and mode used. - Possible options are 'generators', 'storage_units', 'conventional_loads', - 'charging_points', and 'heat_pumps'. + Possible options are 'generator', 'storage_unit', 'conventional_load', + 'charging_point', and 'heat_pump'. configs : :class:`~.tools.config.Config` eDisGo configuration data. 
@@ -183,17 +173,15 @@ def _fixed_cosphi_default_reactive_power_sign(comp_df, component_type, configs): """ reactive_power_mode = configs["reactive_power_mode"] - comp_dict = _get_component_dict() q_sign_dict = { - "generators": get_q_sign_generator, - "storage_units": get_q_sign_generator, - "conventional_loads": get_q_sign_load, - "charging_points": get_q_sign_load, - "heat_pumps": get_q_sign_load, + "generator": get_q_sign_generator, + "storage_unit": get_q_sign_generator, + "conventional_load": get_q_sign_load, + "charging_point": get_q_sign_load, + "heat_pump": get_q_sign_load, } - if component_type in comp_dict.keys(): - comp = comp_dict[component_type] + if component_type in q_sign_dict.keys(): get_q_sign = q_sign_dict[component_type] # write series with power factor for each component q_sign = pd.Series(index=comp_df.index, dtype=float) @@ -201,12 +189,12 @@ def _fixed_cosphi_default_reactive_power_sign(comp_df, component_type, configs): cols = comp_df.index[comp_df.voltage_level == voltage_level] if len(cols) > 0: q_sign[cols] = get_q_sign( - reactive_power_mode[f"{voltage_level}_{comp}"] + reactive_power_mode[f"{voltage_level}_{component_type}"] ) return q_sign else: raise ValueError( "Given 'component_type' is not valid. Valid options are " - "'generators','storage_units', 'conventional_loads', 'charging_points', " - "and 'heat_pumps'." + "'generator', 'storage_unit', 'conventional_load', 'charging_point', " + "and 'heat_pump'." 
) diff --git a/edisgo/flex_opt/reinforce_grid.py b/edisgo/flex_opt/reinforce_grid.py index 9632d097..6ddbb45e 100644 --- a/edisgo/flex_opt/reinforce_grid.py +++ b/edisgo/flex_opt/reinforce_grid.py @@ -27,6 +27,7 @@ def reinforce_grid( edisgo: EDisGo, timesteps_pfa: str | pd.DatetimeIndex | pd.Timestamp | None = None, + reduced_analysis: bool = False, max_while_iterations: int = 20, split_voltage_band: bool = True, mode: str | None = None, @@ -49,6 +50,10 @@ def reinforce_grid( timesteps_pfa specifies for which time steps power flow analysis is conducted. See parameter `timesteps_pfa` in function :attr:`~.EDisGo.reinforce` for more information. + reduced_analysis : bool + Specifies, whether to run reinforcement on a subset of time steps that are most + critical. See parameter `reduced_analysis` in function + :attr:`~.EDisGo.reinforce` for more information. max_while_iterations : int Maximum number of times each while loop is conducted. Default: 20. split_voltage_band : bool @@ -87,23 +92,34 @@ def reinforce_grid( reinforce MV/LV stations for LV worst-cases. Default: False. num_steps_loading : int - In case `timesteps_pfa` is set to 'reduced_analysis', this parameter can be used + In case `reduced_analysis` is set to True, this parameter can be used to specify the number of most critical overloading events to consider. If None, `percentage` is used. Default: None. num_steps_voltage : int - In case `timesteps_pfa` is set to 'reduced_analysis', this parameter can be used + In case `reduced_analysis` is set to True, this parameter can be used to specify the number of most critical voltage issues to select. If None, `percentage` is used. Default: None. percentage : float - In case `timesteps_pfa` is set to 'reduced_analysis', this parameter can be used + In case `reduced_analysis` is set to True, this parameter can be used to specify the percentage of most critical time steps to select. The default is 1.0, in which case all most critical time steps are selected. 
Default: 1.0. use_troubleshooting_mode : bool - In case `timesteps_pfa` is set to 'reduced_analysis', this parameter can be used + In case `reduced_analysis` is set to True, this parameter can be used to specify how to handle non-convergence issues in the power flow analysis. See parameter `use_troubleshooting_mode` in function :attr:`~.EDisGo.reinforce` - for more information. Default: True. + for more information. Default: False. + run_initial_analyze : bool + In case `reduced_analysis` is set to True, this parameter can be + used to specify whether to run an initial analyze to determine most + critical time steps or to use existing results. If set to False, + `use_troubleshooting_mode` is ignored. Default: True. + weight_by_costs : bool + In case `reduced_analysis` is set to True, this parameter can be + used to specify whether to weight time steps by estimated grid expansion costs. + See parameter `weight_by_costs` in + :func:`~.tools.temporal_complexity_reduction.get_most_critical_time_steps` + for more information. Default: False. 
Returns ------- @@ -139,14 +155,6 @@ def reinforce_grid( snapshots["min_residual_load"], ] ).dropna() - elif isinstance(timesteps_pfa, str) and timesteps_pfa == "reduced_analysis": - timesteps_pfa = get_most_critical_time_steps( - edisgo, - num_steps_loading=kwargs.get("num_steps_loading", None), - num_steps_voltage=kwargs.get("num_steps_voltage", None), - percentage=kwargs.get("percentage", 1.0), - use_troubleshooting_mode=kwargs.get("use_troubleshooting_mode", True), - ) # if timesteps_pfa is not of type datetime or does not contain # datetimes throw an error elif not isinstance(timesteps_pfa, datetime.datetime): @@ -170,6 +178,24 @@ def reinforce_grid( else: analyze_mode = mode + if reduced_analysis: + timesteps_pfa = get_most_critical_time_steps( + edisgo, + mode=analyze_mode, + timesteps=timesteps_pfa, + lv_grid_id=lv_grid_id, + scale_timeseries=scale_timeseries, + num_steps_loading=kwargs.get("num_steps_loading", None), + num_steps_voltage=kwargs.get("num_steps_voltage", None), + percentage=kwargs.get("percentage", 1.0), + use_troubleshooting_mode=kwargs.get("use_troubleshooting_mode", False), + run_initial_analyze=kwargs.get("run_initial_analyze", True), + weight_by_costs=kwargs.get("weight_by_costs", False), + ) + if timesteps_pfa is not None and len(timesteps_pfa) == 0: + logger.debug("Zero time steps for grid reinforcement.") + return edisgo.results + edisgo.analyze( mode=analyze_mode, timesteps=timesteps_pfa, @@ -740,7 +766,7 @@ def catch_convergence_reinforce_grid( Reinforcement strategy to reinforce grids with non-converging time steps. First, conducts a grid reinforcement with only converging time steps. - Afterwards, tries to run reinforcement with all time steps that did not converge + Afterward, tries to run reinforcement with all time steps that did not converge in the beginning. 
At last, if there are still time steps that do not converge, the feed-in and load time series are iteratively scaled and the grid reinforced, starting with a low grid load and scaling-up the time series until the original @@ -820,14 +846,16 @@ def reinforce(): selected_timesteps = converging_timesteps reinforce() - # Run reinforcement for time steps that did not converge after initial reinforcement - if not non_converging_timesteps.empty: - logger.info( - "Run reinforcement for time steps that did not converge after initial " - "reinforcement." - ) - selected_timesteps = non_converging_timesteps - converged = reinforce() + # Run reinforcement for time steps that did not converge after initial + # reinforcement (this only needs to be done when the grid was previously + # reinforced using converged time steps, which is why it is within that + # if-statement) + if not non_converging_timesteps.empty: + logger.info( + "Run reinforcement for time steps that did not converge after initial " + "reinforcement." + ) + selected_timesteps = non_converging_timesteps + converged = reinforce() if converged: return edisgo.results diff --git a/edisgo/flex_opt/storage_positioning.py b/edisgo/flex_opt/storage_positioning.py deleted file mode 100644 index b0a7015d..00000000 --- a/edisgo/flex_opt/storage_positioning.py +++ /dev/null @@ -1,707 +0,0 @@ -import logging - -from math import ceil, sqrt - -import networkx as nx -import numpy as np -import pandas as pd - -from networkx.algorithms.shortest_paths.weighted import ( - _dijkstra as dijkstra_shortest_path_length, -) - -from edisgo.flex_opt import check_tech_constraints, costs -from edisgo.tools import plots, tools - -logger = logging.getLogger(__name__) - - -def one_storage_per_feeder( - edisgo, storage_timeseries, storage_nominal_power=None, **kwargs -): - """ - Allocates the given storage capacity to multiple smaller storages. - - ToDo: adapt to refactored code!
- - For each feeder with load or voltage issues it is checked if integrating a - storage will reduce peaks in the feeder, starting with the feeder with - the highest theoretical network expansion costs. A heuristic approach is used - to estimate storage sizing and siting while storage operation is carried - over from the given storage operation. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - storage_timeseries : :pandas:`pandas.DataFrame` - Total active and reactive power time series that will be allocated to - the smaller storages in feeders with load or voltage issues. Columns of - the dataframe are 'p' containing active power time series in kW and 'q' - containing the reactive power time series in kvar. Index is a - :pandas:`pandas.DatetimeIndex`. - storage_nominal_power : :obj:`float` or None - Nominal power in kW that will be allocated to the smaller storages in - feeders with load or voltage issues. If no nominal power is provided - the maximum active power given in `storage_timeseries` is used. - Default: None. - debug : :obj:`Boolean`, optional - If dedug is True a dataframe with storage size and path to storage of - all installed and possibly discarded storages is saved to a csv file - and a plot with all storage positions is created and saved, both to the - current working directory with filename `storage_results_{MVgrid_id}`. - Default: False. - check_costs_reduction : :obj:`Boolean` or :obj:`str`, optional - This parameter specifies when and whether it should be checked if a - storage reduced network expansion costs or not. It can be used as a safety - check but can be quite time consuming. Possible options are: - - * 'each_feeder' - Costs reduction is checked for each feeder. If the storage did not - reduce network expansion costs it is discarded. - * 'once' - Costs reduction is checked after the total storage capacity is - allocated to the feeders. 
If the storages did not reduce network - expansion costs they are all discarded. - * False - Costs reduction is never checked. - - Default: False. - - """ - - def _feeder_ranking(grid_expansion_costs): - """ - Get feeder ranking from network expansion costs DataFrame. - - MV feeders are ranked descending by network expansion costs that are - attributed to that feeder. - - Parameters - ---------- - grid_expansion_costs : :pandas:`pandas.DataFrame` - grid_expansion_costs DataFrame from :class:`~.network.network.Results` - of the copied edisgo object. - - Returns - ------- - :pandas:`pandas.Series` - Series with ranked MV feeders (in the copied graph) of type - :class:`~.network.components.Line`. Feeders are ranked by total network - expansion costs of all measures conducted in the feeder. The - feeder with the highest costs is in the first row and the feeder - with the lowest costs in the last row. - - """ - return ( - grid_expansion_costs.groupby(["mv_feeder"], sort=False) - .sum() - .reset_index() - .sort_values(by=["total_costs"], ascending=False)["mv_feeder"] - ) - - def _shortest_path(node): - # TODO: LVStation class is not used anymore - # resolve this when storage positioning is refactored - if isinstance(node, LVStation): # noqa: F821 - return len(nx.shortest_path(node.mv_grid.graph, node.mv_grid.station, node)) - else: - return len(nx.shortest_path(node.grid.graph, node.grid.station, node)) - - def _find_battery_node(edisgo, critical_lines_feeder, critical_nodes_feeder): - """ - Evaluates where to install the storage. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - The original edisgo object. - critical_lines_feeder : :pandas:`pandas.DataFrame` - Dataframe containing over-loaded lines in MV feeder, their maximum - relative over-loading and the corresponding time step. See - :func:`edisgo.flex_opt.check_tech_constraints.mv_line_overload` for - more information. 
- critical_nodes_feeder : :obj:`list` - List with all nodes in MV feeder with voltage issues. - - Returns - ------- - :obj:`float` - Node where storage is installed. - - """ - - # if there are overloaded lines in the MV feeder the battery storage - # will be installed at the node farthest away from the MV station - if not critical_lines_feeder.empty: - logger.debug("Storage positioning due to overload.") - # dictionary with nodes and their corresponding path length to - # MV station - path_length_dict = {} - for line in critical_lines_feeder.index: - nodes = line.grid.graph.nodes_from_line(line) - for node in nodes: - path_length_dict[node] = _shortest_path(node) - # return node farthest away - return [ - _ - for _ in path_length_dict - if path_length_dict[_] == max(path_length_dict.values()) - ][0] - - # if there are voltage issues in the MV network the battery storage will - # be installed at the first node in path that exceeds 2/3 of the line - # length from station to critical node with highest voltage deviation - if critical_nodes_feeder: - logger.debug("Storage positioning due to voltage issues.") - node = critical_nodes_feeder[0] - - # get path length from station to critical node - get_weight = lambda u, v, data: data["line"].length # noqa: E731 - path_length = dijkstra_shortest_path_length( - edisgo.network.mv_grid.graph, - edisgo.network.mv_grid.station, - get_weight, - target=node, - ) - - # find first node in path that exceeds 2/3 of the line length - # from station to critical node farthest away from the station - path = nx.shortest_path( - edisgo.network.mv_grid.graph, - edisgo.network.mv_grid.station, - node, - ) - return next(j for j in path if path_length[j] >= path_length[node] * 2 / 3) - - return None - - def _calc_storage_size(edisgo, feeder, max_storage_size): - """ - Calculates storage size that reduces residual load. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - The original edisgo object. 
- feeder : :class:`~.network.components.Line` - MV feeder the storage will be connected to. The line object is an - object from the copied graph. - - Returns - ------- - :obj:`float` - Storage size that reduced the residual load in the feeder. - - """ - step_size = 200 - sizes = [0] + list( - np.arange(p_storage_min, max_storage_size + 0.5 * step_size, step_size) - ) - p_feeder = edisgo.network.results.pfa_p.loc[:, repr(feeder)] - q_feeder = edisgo.network.results.pfa_q.loc[:, repr(feeder)] - p_slack = edisgo.network.pypsa.generators_t.p.loc[:, "Generator_slack"] * 1e3 - - # get sign of p and q - lines = edisgo.network.pypsa.lines.loc[repr(feeder), :] - mv_station_bus = ( - "bus0" - if lines.loc["bus0"] == f"Bus_{repr(edisgo.network.mv_grid.station)}" - else "bus1" - ) - if mv_station_bus == "bus0": - diff = ( - edisgo.network.pypsa.lines_t.p1.loc[:, repr(feeder)] - - edisgo.network.pypsa.lines_t.p0.loc[:, repr(feeder)] - ) - diff_q = ( - edisgo.network.pypsa.lines_t.q1.loc[:, repr(feeder)] - - edisgo.network.pypsa.lines_t.q0.loc[:, repr(feeder)] - ) - else: - diff = ( - edisgo.network.pypsa.lines_t.p0.loc[:, repr(feeder)] - - edisgo.network.pypsa.lines_t.p1.loc[:, repr(feeder)] - ) - diff_q = ( - edisgo.network.pypsa.lines_t.q0.loc[:, repr(feeder)] - - edisgo.network.pypsa.lines_t.q1.loc[:, repr(feeder)] - ) - p_sign = pd.Series([-1 if _ < 0 else 1 for _ in diff], index=p_feeder.index) - q_sign = pd.Series([-1 if _ < 0 else 1 for _ in diff_q], index=p_feeder.index) - - # get allowed load factors per case - lf = { - "feed-in_case": edisgo.network.config["grid_expansion_load_factors"][ - "mv_feed-in_case_line" - ], - "load_case": network.config["grid_expansion_load_factors"][ - "mv_load_case_line" - ], - } - - # calculate maximum apparent power for each storage size to find - # storage size that minimizes apparent power in the feeder - p_feeder = p_feeder.multiply(p_sign) - q_feeder = q_feeder.multiply(q_sign) - s_max = [] - for size in sizes: - share = size / 
storage_nominal_power - p_storage = storage_timeseries.p * share - q_storage = storage_timeseries.q * share - p_total = p_feeder + p_storage - q_total = q_feeder + q_storage - p_hv_mv_station = p_slack - p_storage - lf_ts = p_hv_mv_station.apply( - lambda _: lf["feed-in_case"] if _ < 0 else lf["load_case"] - ) - s_max_ts = (p_total**2 + q_total**2).apply(sqrt).divide(lf_ts) - s_max.append(max(s_max_ts)) - - return sizes[pd.Series(s_max).idxmin()] - - def _critical_nodes_feeder(edisgo, feeder): - """ - Returns all nodes in MV feeder with voltage issues. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - The original edisgo object. - feeder : :class:`~.network.components.Line` - MV feeder the storage will be connected to. The line object is an - object from the copied graph. - - Returns - ------- - :obj:`list` - List with all nodes in MV feeder with voltage issues. - - """ - # get all nodes with voltage issues in MV network - critical_nodes = check_tech_constraints.voltage_issues( - edisgo.network, voltage_levels="mv" - ) - if critical_nodes: - critical_nodes = critical_nodes[edisgo.network.mv_grid] - else: - return [] - - return [n for n in critical_nodes.index if repr(n.mv_feeder) == repr(feeder)] - - def _critical_lines_feeder(edisgo, feeder): - """ - Returns all lines in MV feeder with overload issues. - - Parameters - ----------- - edisgo : :class:`~.network.network.EDisGo` - The original edisgo object. - feeder : :class:`~.network.components.Line` - MV feeder the storage will be connected to. The line object is an - object from the copied graph. - - Returns - ------- - :pandas:`pandas.DataFrame` - Dataframe containing over-loaded lines in MV feeder, their maximum - relative over-loading and the corresponding time step. See - :func:`edisgo.flex_opt.check_tech_constraints.mv_line_overload` for - more information. 
- - """ - # return grid_expansion_costs_feeder_ranking[ - # (grid_expansion_costs_feeder_ranking.mv_feeder == feeder) & - # (grid_expansion_costs_feeder_ranking.voltage_level == 'mv')] - # get all overloaded MV lines - critical_lines = check_tech_constraints.mv_line_overload(edisgo.network) - # filter overloaded lines in feeder - critical_lines_feeder = [ - line - for line in critical_lines.index - if repr(tools.get_mv_feeder_from_line(line)) == repr(feeder) - ] - - return critical_lines.loc[critical_lines_feeder, :] - - def _estimate_new_number_of_lines(critical_lines_feeder): - return sum( - ( - ceil( - critical_lines_feeder.loc[crit_line, "max_rel_overload"] - * crit_line.quantity - ) - - crit_line.quantity - ) - for crit_line in critical_lines_feeder.index - ) - - raise NotImplementedError - - debug = kwargs.get("debug", False) - check_costs_reduction = kwargs.get("check_costs_reduction", False) - - # global variables - # minimum and maximum storage power to be connected to the MV network - p_storage_min = 300 - p_storage_max = 4500 - - # remaining storage nominal power - if storage_nominal_power is None: - storage_nominal_power = max(abs(storage_timeseries.p)) - p_storage_remaining = storage_nominal_power - - if debug: - feeder_repr = [] - storage_path = [] - storage_repr = [] - storage_size = [] - - # rank MV feeders by network expansion costs - - # conduct network reinforcement on copied edisgo object on worst-case time - # steps - grid_expansion_results_init = edisgo.reinforce( - copy_graph=True, timesteps_pfa="snapshot_analysis", mode="mv" - ) - - # only analyse storage integration if there were any network expansion needs - if grid_expansion_results_init.equipment_changes.empty: - logger.debug( - "No storage integration necessary since there are no " - "network expansion needs." 
- ) - return - else: - equipment_changes_reinforcement_init = ( - grid_expansion_results_init.equipment_changes.loc[ - grid_expansion_results_init.equipment_changes.iteration_step > 0 - ] - ) - total_grid_expansion_costs = ( - grid_expansion_results_init.grid_expansion_costs.total_costs.sum() - ) - if equipment_changes_reinforcement_init.empty: - logger.debug( - "No storage integration necessary since there are no " - "network expansion needs." - ) - return - else: - network = equipment_changes_reinforcement_init.index[0].grid.network - - # calculate network expansion costs without costs for new generators - # to be used in feeder ranking - grid_expansion_costs_feeder_ranking = costs.grid_expansion_costs( - network, without_generator_import=True, mode="mv" - ) - - ranked_feeders = _feeder_ranking(grid_expansion_costs_feeder_ranking) - - count = 1 - storage_obj_list = [] - total_grid_expansion_costs_new = "not calculated" - for feeder in ranked_feeders.values: - logger.debug("Feeder: {}".format(count)) - count += 1 - - # first step: find node where storage will be installed - - critical_nodes_feeder = _critical_nodes_feeder(edisgo, feeder) - critical_lines_feeder = _critical_lines_feeder(edisgo, feeder) - - # get node the storage will be connected to (in original graph) - battery_node = _find_battery_node( - edisgo, critical_lines_feeder, critical_nodes_feeder - ) - - if battery_node: - # add to output lists - if debug: - feeder_repr.append(repr(feeder)) - storage_path.append( - nx.shortest_path( - edisgo.network.mv_grid.graph, - edisgo.network.mv_grid.station, - battery_node, - ) - ) - - # second step: calculate storage size - - max_storage_size = min(p_storage_remaining, p_storage_max) - p_storage = _calc_storage_size(edisgo, feeder, max_storage_size) - - # if p_storage is greater than or equal to the minimum storage - # power required, do storage integration - if p_storage >= p_storage_min: - # third step: integrate storage - - share = p_storage / 
storage_nominal_power - edisgo.integrate_storage( - timeseries=storage_timeseries.p * share, - position=battery_node, - voltage_level="mv", - timeseries_reactive_power=storage_timeseries.q * share, - ) - tools.assign_mv_feeder_to_nodes(edisgo.network.mv_grid) - - # get new storage object - storage_obj = [ - _ - for _ in edisgo.network.mv_grid.graph.nodes_by_attribute("storage") - if _ in list(edisgo.network.mv_grid.graph.neighbors(battery_node)) - ][0] - storage_obj_list.append(storage_obj) - - logger.debug( - "Storage with nominal power of {} kW connected to " - "node {} (path to HV/MV station {}).".format( - p_storage, - battery_node, - nx.shortest_path( - battery_node.grid.graph, - battery_node.grid.station, - battery_node, - ), - ) - ) - - # fourth step: check if storage integration reduced network - # reinforcement costs or number of issues - - if check_costs_reduction == "each_feeder": - # calculate new network expansion costs - - grid_expansion_results_new = edisgo.reinforce( - copy_graph=True, timesteps_pfa="snapshot_analysis" - ) - - # fmt: off - total_grid_expansion_costs_new = ( - grid_expansion_results_new.grid_expansion_costs.total_costs.sum( - ) - ) - # fmt: on - - costs_diff = ( - total_grid_expansion_costs - total_grid_expansion_costs_new - ) - - if costs_diff > 0: - logger.debug( - "Storage integration in feeder {} reduced network " - "expansion costs by {} kEuro.".format(feeder, costs_diff) - ) - - if debug: - storage_repr.append(repr(storage_obj)) - storage_size.append(storage_obj.nominal_power) - - total_grid_expansion_costs = total_grid_expansion_costs_new - - else: - logger.debug( - "Storage integration in feeder {} did not reduce " - "network expansion costs (costs increased by {} " - "kEuro).".format(feeder, -costs_diff) - ) - - tools.disconnect_storage(edisgo.network, storage_obj) - p_storage = 0 - - if debug: - storage_repr.append(None) - storage_size.append(0) - - edisgo.integrate_storage( - timeseries=storage_timeseries.p * 0, - 
position=battery_node, - voltage_level="mv", - timeseries_reactive_power=storage_timeseries.q * 0, - ) - tools.assign_mv_feeder_to_nodes(edisgo.network.mv_grid) - - else: - number_parallel_lines_before = _estimate_new_number_of_lines( - critical_lines_feeder - ) - edisgo.analyze() - critical_lines_feeder_new = _critical_lines_feeder(edisgo, feeder) - critical_nodes_feeder_new = _critical_nodes_feeder(edisgo, feeder) - number_parallel_lines = _estimate_new_number_of_lines( - critical_lines_feeder_new - ) - - # if there are critical lines check if number of parallel - # lines was reduced - if not critical_lines_feeder.empty: - diff_lines = ( - number_parallel_lines_before - number_parallel_lines - ) - # if it was not reduced check if there are critical - # nodes and if the number was reduced - if diff_lines <= 0: - # if there are no critical nodes remove storage - if not critical_nodes_feeder: - logger.debug( - "Storage integration in feeder {} did not " - "reduce number of critical lines (number " - "increased by {}), storage " - "is therefore removed.".format(feeder, -diff_lines) - ) - - tools.disconnect_storage(edisgo.network, storage_obj) - p_storage = 0 - - if debug: - storage_repr.append(None) - storage_size.append(0) - - edisgo.integrate_storage( - timeseries=storage_timeseries.p * 0, - position=battery_node, - voltage_level="mv", - timeseries_reactive_power=storage_timeseries.q - * 0, - ) - tools.assign_mv_feeder_to_nodes( - edisgo.network.mv_grid - ) - else: - logger.debug( - "Critical nodes in feeder {} " - "before and after storage integration: " - "{} vs. 
{}".format( - feeder, - critical_nodes_feeder, - critical_nodes_feeder_new, - ) - ) - if debug: - storage_repr.append(repr(storage_obj)) - storage_size.append(storage_obj.nominal_power) - else: - logger.debug( - "Storage integration in feeder {} reduced " - "number of critical lines.".format(feeder) - ) - - if debug: - storage_repr.append(repr(storage_obj)) - storage_size.append(storage_obj.nominal_power) - - # if there are no critical lines - else: - logger.debug( - "Critical nodes in feeder {} " - "before and after storage integration: " - "{} vs. {}".format( - feeder, - critical_nodes_feeder, - critical_nodes_feeder_new, - ) - ) - if debug: - storage_repr.append(repr(storage_obj)) - storage_size.append(storage_obj.nominal_power) - - # fifth step: if there is storage capacity left, rerun - # the past steps for the next feeder in the ranking - # list - p_storage_remaining = p_storage_remaining - p_storage - if not p_storage_remaining > p_storage_min: - break - - else: - logger.debug("No storage integration in feeder {}.".format(feeder)) - - if debug: - storage_repr.append(None) - storage_size.append(0) - - edisgo.integrate_storage( - timeseries=storage_timeseries.p * 0, - position=battery_node, - voltage_level="mv", - timeseries_reactive_power=storage_timeseries.q * 0, - ) - tools.assign_mv_feeder_to_nodes(edisgo.network.mv_grid) - else: - logger.debug( - "No storage integration in feeder {} because there " - "are neither overloading nor voltage issues.".format(feeder) - ) - - if debug: - storage_repr.append(None) - storage_size.append(0) - feeder_repr.append(repr(feeder)) - storage_path.append([]) - - if check_costs_reduction == "once": - # check costs reduction and discard all storages if costs were not - # reduced - grid_expansion_results_new = edisgo.reinforce( - copy_graph=True, timesteps_pfa="snapshot_analysis" - ) - - total_grid_expansion_costs_new = ( - grid_expansion_results_new.grid_expansion_costs.total_costs.sum() - ) - - costs_diff = 
total_grid_expansion_costs - total_grid_expansion_costs_new - - if costs_diff > 0: - logger.info( - "Storage integration in network {} reduced network " - "expansion costs by {} kEuro.".format(edisgo.network.id, costs_diff) - ) - else: - logger.info( - "Storage integration in network {} did not reduce " - "network expansion costs (costs increased by {} " - "kEuro).".format(edisgo.network.id, -costs_diff) - ) - - for storage in storage_obj_list: - tools.disconnect_storage(edisgo.network, storage) - elif check_costs_reduction == "each_feeder": - # if costs redcution was checked after each storage only give out - # total costs reduction - if total_grid_expansion_costs_new == "not calculated": - costs_diff = 0 - else: - total_grid_expansion_costs = ( - grid_expansion_results_init.grid_expansion_costs.total_costs.sum() - ) - costs_diff = total_grid_expansion_costs - total_grid_expansion_costs_new - - logger.info( - "Storage integration in network {} reduced network " - "expansion costs by {} kEuro.".format(edisgo.network.id, costs_diff) - ) - - if debug: - plots.storage_size( - edisgo.network.mv_grid, - edisgo.network.pypsa, - filename="storage_results_{}.pdf".format(edisgo.network.id), - lopf=False, - ) - storages_df = pd.DataFrame( - { - "path": storage_path, - "repr": storage_repr, - "p_nom": storage_size, - }, - index=feeder_repr, - ) - storages_df.to_csv("storage_results_{}.csv".format(edisgo.network.id)) - - edisgo.network.results.storages_costs_reduction = pd.DataFrame( - { - "grid_expansion_costs_initial": total_grid_expansion_costs, - "grid_expansion_costs_with_storages": total_grid_expansion_costs_new, - }, - index=[edisgo.network.id], - ) diff --git a/edisgo/io/db.py b/edisgo/io/db.py index 02d1320e..19dfabcd 100644 --- a/edisgo/io/db.py +++ b/edisgo/io/db.py @@ -163,7 +163,7 @@ def engine(path: Path | str, ssh: bool = False) -> Engine: Returns ------- - sqlalchemy.engine.base.Engine + :sqlalchemy:`sqlalchemy.Engine` Database engine """ diff --git 
a/edisgo/io/dsm_import.py b/edisgo/io/dsm_import.py index fac856d9..648ae503 100644 --- a/edisgo/io/dsm_import.py +++ b/edisgo/io/dsm_import.py @@ -28,7 +28,7 @@ def oedb( ): """ Gets industrial and CTS DSM profiles from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Profiles comprise minimum and maximum load increase in MW as well as maximum energy pre- and postponing in MWh. diff --git a/edisgo/io/generators_import.py b/edisgo/io/generators_import.py index e9628c6d..281e78a0 100755 --- a/edisgo/io/generators_import.py +++ b/edisgo/io/generators_import.py @@ -42,9 +42,9 @@ def oedb_legacy(edisgo_object, generator_scenario, **kwargs): The importer uses SQLAlchemy ORM objects. These are defined in `ego.io `_. The data is imported from the tables - `conventional power plants `_ and - `renewable power plants `_. When the generator data is retrieved, the following steps are conducted: diff --git a/edisgo/io/heat_pump_import.py b/edisgo/io/heat_pump_import.py index bb80064b..69133ca1 100644 --- a/edisgo/io/heat_pump_import.py +++ b/edisgo/io/heat_pump_import.py @@ -583,7 +583,7 @@ def _grid_integration( def efficiency_resistive_heaters_oedb(scenario, engine): """ Get efficiency of resistive heaters from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. 
Parameters ---------- diff --git a/edisgo/io/powermodels_io.py b/edisgo/io/powermodels_io.py index 16449bd1..541e01ff 100644 --- a/edisgo/io/powermodels_io.py +++ b/edisgo/io/powermodels_io.py @@ -667,7 +667,7 @@ def _build_gen(edisgo_obj, psa_net, pm, flexible_storage_units, s_base): gen.bus[gen_i], flexible_storage_units=flexible_storage_units, ) - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "gen") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "generator") q = [ sign * np.tan(np.arccos(pf)) * gen.p_nom[gen_i], sign * np.tan(np.arccos(pf)) * gen.p_nom_min[gen_i], @@ -704,7 +704,7 @@ def _build_gen(edisgo_obj, psa_net, pm, flexible_storage_units, s_base): psa_net.storage_units.bus.loc[inflexible_storage_units[stor_i]], flexible_storage_units=flexible_storage_units, ) - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage_unit") p_g = max( [ psa_net.storage_units_t.p_set[inflexible_storage_units[stor_i]][0], @@ -837,7 +837,7 @@ def _build_branch(edisgo_obj, psa_net, pm, flexible_storage_units, s_base): flexible_storage_units=flexible_storage_units, ) # retrieve power factor from config - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage_unit") pm["branch"][str(stor_i + len(branches.index) + 1)] = { "name": "bss_branch_" + str(stor_i + 1), @@ -919,22 +919,22 @@ def _build_load( edisgo_obj.topology.loads_df.loc[loads_df.index[load_i]].type == "conventional_load" ): - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "load") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "conventional_load") elif ( edisgo_obj.topology.loads_df.loc[loads_df.index[load_i]].type == "heat_pump" ): - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "hp") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "heat_pump") elif ( edisgo_obj.topology.loads_df.loc[loads_df.index[load_i]].type == "charging_point" ): - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "cp") + pf, sign = _get_pf(edisgo_obj, 
pm, idx_bus, "charging_point") else: logger.warning( "No type specified for load {}. Power factor and sign will" "be set for conventional load.".format(loads_df.index[load_i]) ) - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "load") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "conventional_load") p_d = psa_net.loads_t.p_set[loads_df.index[load_i]] q_d = psa_net.loads_t.q_set[loads_df.index[load_i]] pm["load"][str(load_i + 1)] = { @@ -955,12 +955,18 @@ def _build_load( psa_net.storage_units.bus.loc[inflexible_storage_units[stor_i]], flexible_storage_units=flexible_storage_units, ) - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage_unit") p_d = -min( - [psa_net.storage_units_t.p_set[inflexible_storage_units[stor_i]][0], 0] + [ + psa_net.storage_units_t.p_set[inflexible_storage_units[stor_i]][0], + np.float64(0.0), + ] ) q_d = -max( - [psa_net.storage_units_t.q_set[inflexible_storage_units[stor_i]][0], 0] + [ + psa_net.storage_units_t.q_set[inflexible_storage_units[stor_i]][0], + np.float64(0.0), + ] ) pm["load"][str(stor_i + len(loads_df.index) + 1)] = { "pd": p_d.round(20) / s_base, @@ -1030,7 +1036,7 @@ def _build_battery_storage( flexible_storage_units=flexible_storage_units, ) # retrieve power factor from config - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "storage_unit") e_max = ( psa_net.storage_units.p_nom.loc[flexible_storage_units[stor_i]] * psa_net.storage_units.max_hours.loc[flexible_storage_units[stor_i]] @@ -1145,7 +1151,7 @@ def _build_electromobility(edisgo_obj, psa_net, pm, s_base, flexible_cps): eta = edisgo_obj.electromobility.simbev_config_df.eta_cp.values[0] except IndexError: eta = 0.9 - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "cp") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "charging_point") q = ( sign * np.tan(np.arccos(pf)) @@ -1212,7 +1218,7 @@ def _build_heatpump(psa_net, pm, edisgo_obj, s_base, flexible_hps): for 
hp_i in np.arange(len(heat_df.index)): idx_bus = _mapping(psa_net, edisgo_obj, heat_df.bus[hp_i]) # retrieve power factor and sign from config - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "hp") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "heat_pump") q = sign * np.tan(np.arccos(pf)) * heat_df.p_set[hp_i] p_d = heat_df2[heat_df.index[hp_i]] pm["heatpumps"][str(hp_i + 1)] = { @@ -1440,7 +1446,7 @@ def _build_dsm(edisgo_obj, psa_net, pm, s_base, flexible_loads): for dsm_i in np.arange(len(dsm_df.index)): idx_bus = _mapping(psa_net, edisgo_obj, dsm_df.bus[dsm_i]) # retrieve power factor and sign from config - pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "load") + pf, sign = _get_pf(edisgo_obj, pm, idx_bus, "conventional_load") p_max = edisgo_obj.dsm.p_max[dsm_df.index[dsm_i]] p_min = edisgo_obj.dsm.p_min[dsm_df.index[dsm_i]] e_min = edisgo_obj.dsm.e_min[dsm_df.index[dsm_i]] @@ -2047,7 +2053,8 @@ def _get_pf(edisgo_obj, pm, idx_bus, kind): idx_bus : int Bus index from PowerModels bus dictionary. kind : str - Must be one of ["gen", "load", "storage", "hp", "cp"]. + Must be one of ["generator", "conventional_load", "storage_unit", "heat_pump", + "charging_point"]. 
Returns ------- @@ -2055,18 +2062,14 @@ def _get_pf(edisgo_obj, pm, idx_bus, kind): """ grid_level = pm["bus"][str(idx_bus)]["grid_level"] - pf = edisgo_obj.config._data["reactive_power_factor"][ - "{}_{}".format(grid_level, kind) - ] - sign = edisgo_obj.config._data["reactive_power_mode"][ - "{}_{}".format(grid_level, kind) - ] - if kind in ["gen", "storage"]: + pf = edisgo_obj.config["reactive_power_factor"]["{}_{}".format(grid_level, kind)] + sign = edisgo_obj.config["reactive_power_mode"]["{}_{}".format(grid_level, kind)] + if kind in ["generator", "storage_unit"]: if sign == "inductive": sign = -1 else: sign = 1 - elif kind in ["load", "hp", "cp"]: + elif kind in ["conventional_load", "heat_pump", "charging_point"]: if sign == "inductive": sign = 1 else: diff --git a/edisgo/io/timeseries_import.py b/edisgo/io/timeseries_import.py index 99204b1c..6c66f33a 100644 --- a/edisgo/io/timeseries_import.py +++ b/edisgo/io/timeseries_import.py @@ -74,7 +74,7 @@ def _timeindex_helper_func( def feedin_oedb_legacy(edisgo_object, timeindex=None): """ Import feed-in time series data for wind and solar power plants from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Parameters ---------- @@ -158,7 +158,7 @@ def feedin_oedb( ): """ Import feed-in time series data for wind and solar power plants from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Parameters ---------- @@ -297,6 +297,10 @@ def load_time_series_demandlib(edisgo_obj, timeindex=None): "day": edisgo_obj.config["demandlib"]["weekend_day"], "night": edisgo_obj.config["demandlib"]["weekend_night"], }, + "holiday": { + "day": edisgo_obj.config["demandlib"]["holiday_day"], + "night": edisgo_obj.config["demandlib"]["holiday_night"], + }, }, ) @@ -319,7 +323,7 @@ def load_time_series_demandlib(edisgo_obj, timeindex=None): def cop_oedb(edisgo_object, engine, weather_cell_ids, timeindex=None): """ Get COP (coefficient of performance) time series data from the - `OpenEnergy DataBase `_. 
+ `OpenEnergy DataBase `_. Parameters ---------- @@ -377,7 +381,7 @@ def cop_oedb(edisgo_object, engine, weather_cell_ids, timeindex=None): def heat_demand_oedb(edisgo_obj, scenario, engine, timeindex=None): """ Get heat demand profiles for heat pumps from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Heat demand data is returned for all heat pumps in the grid. For more information on how individual heat demand profiles are obtained see @@ -498,7 +502,7 @@ def electricity_demand_oedb( ): """ Get electricity demand profiles for all conventional loads from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. Conventional loads comprise conventional electricity applications in the residential, CTS and industrial sector. @@ -1191,7 +1195,7 @@ def get_residential_electricity_profiles_per_building(building_ids, scenario, en List of building IDs to retrieve electricity demand profiles for. scenario : str Scenario for which to retrieve demand data. Possible options - are 'eGon2035' and 'eGon100RE'. + are 'eGon2021', 'eGon2035' and 'eGon100RE'. engine : :sqlalchemy:`sqlalchemy.Engine` Database engine. @@ -1220,30 +1224,21 @@ def _get_scaling_factors_of_zensus_cells(zensus_ids): column factor. 
""" - with session_scope_egon_data(engine) as session: - if scenario == "eGon2035": - query = session.query( - egon_household_electricity_profile_in_census_cell.cell_id, - egon_household_electricity_profile_in_census_cell.factor_2035.label( - "factor" - ), - ).filter( - egon_household_electricity_profile_in_census_cell.cell_id.in_( - zensus_ids - ) - ) - else: - query = session.query( - egon_household_electricity_profile_in_census_cell.cell_id, - egon_household_electricity_profile_in_census_cell.factor_2050.label( - "factor" - ), - ).filter( - egon_household_electricity_profile_in_census_cell.cell_id.in_( - zensus_ids - ) - ) - return pd.read_sql(query.statement, engine, index_col="cell_id") + if scenario == "eGon2021": + return pd.DataFrame(index=zensus_ids, data={"factor": 1.0}) + else: + with session_scope_egon_data(engine) as session: + if scenario == "eGon2035": + query = session.query( + hh_profile.cell_id, + hh_profile.factor_2035.label("factor"), + ).filter(hh_profile.cell_id.in_(zensus_ids)) + else: + query = session.query( + hh_profile.cell_id, + hh_profile.factor_2050.label("factor"), + ).filter(hh_profile.cell_id.in_(zensus_ids)) + return pd.read_sql(query.statement, engine, index_col="cell_id") def _get_profile_ids_of_buildings(building_ids): """ @@ -1302,7 +1297,9 @@ def _get_profiles(profile_ids): saio.register_schema("demand", engine) from saio.demand import ( - egon_household_electricity_profile_in_census_cell, + egon_household_electricity_profile_in_census_cell as hh_profile, + ) + from saio.demand import ( egon_household_electricity_profile_of_buildings, iee_household_load_profiles, ) @@ -1350,7 +1347,7 @@ def get_industrial_electricity_profiles_per_site(site_ids, scenario, engine): List of industrial site and OSM IDs to retrieve electricity demand profiles for. scenario : str Scenario for which to retrieve demand data. Possible options - are 'eGon2035' and 'eGon100RE'. + are 'eGon2021', 'eGon2035' and 'eGon100RE'. 
engine : :sqlalchemy:`sqlalchemy.Engine` Database engine. diff --git a/edisgo/network/grids.py b/edisgo/network/grids.py index 7f466b3d..20d50ed7 100644 --- a/edisgo/network/grids.py +++ b/edisgo/network/grids.py @@ -90,20 +90,19 @@ def graph(self): @property def geopandas(self): """ - Returns components as :geopandas:`GeoDataFrame`\\ s + Returns components as :geopandas:`GeoDataFrame`\\ s. Returns container with :geopandas:`GeoDataFrame`\\ s containing all georeferenced components within the grid. Returns ------- - :class:`~.tools.geopandas_helper.GeoPandasGridContainer` or \ - list(:class:`~.tools.geopandas_helper.GeoPandasGridContainer`) + :class:`~.tools.geopandas_helper.GeoPandasGridContainer` Data container with GeoDataFrames containing all georeferenced components - within the grid(s). + within the grid. """ - return to_geopandas(self) + return to_geopandas(self, srid=self.edisgo_obj.topology.grid_district["srid"]) @property def station(self): @@ -650,10 +649,3 @@ def draw( else: plt.savefig(filename, dpi=150, bbox_inches="tight", pad_inches=0.1) plt.close() - - @property - def geopandas(self): - """ - TODO: Remove this as soon as LVGrids are georeferenced - """ - raise NotImplementedError("LV Grids are not georeferenced yet.") diff --git a/edisgo/network/heat.py b/edisgo/network/heat.py index 22403d0f..1762f56c 100644 --- a/edisgo/network/heat.py +++ b/edisgo/network/heat.py @@ -131,7 +131,7 @@ def set_cop(self, edisgo_object, ts_cop, **kwargs): Write COP time series for heat pumps to py:attr:`~cop_df`. COP time series can either be given to this function or be obtained from the - `OpenEnergy DataBase `_. + `OpenEnergy DataBase `_. In case they are obtained from the OpenEnergy DataBase the heat pumps need to already be integrated into the grid, i.e. given in :attr:`~.network.topology.Topology.loads_df`. @@ -150,7 +150,7 @@ def set_cop(self, edisgo_object, ts_cop, **kwargs): * 'oedb' COP / efficiency data are obtained from the `OpenEnergy DataBase - `_. 
+ `_. In case of heat pumps weather cell specific hourly COP time series are obtained (see :func:`edisgo.io.timeseries_import.cop_oedb` for more information). Using information on which weather cell each heat pump @@ -317,7 +317,7 @@ def set_heat_demand(self, edisgo_object, ts_heat_demand, **kwargs): Write heat demand time series of heat pumps to py:attr:`~heat_demand_df`. Heat demand time series can either be given to this function or be obtained from - the `OpenEnergy DataBase `_. + the `OpenEnergy DataBase `_. In case they are obtained from the OpenEnergy DataBase the heat pumps need to already be integrated into the grid, i.e. given in :attr:`~.network.topology.Topology.loads_df`. @@ -336,7 +336,7 @@ def set_heat_demand(self, edisgo_object, ts_heat_demand, **kwargs): * 'oedb' Heat demand time series are obtained from the `OpenEnergy DataBase - `_ (see + `_ (see :func:`edisgo.io.timeseries_import.heat_demand_oedb` for more information). Time series are only obtained for heat pumps that are already integrated diff --git a/edisgo/network/overlying_grid.py b/edisgo/network/overlying_grid.py index 77c9ccad..241768a2 100644 --- a/edisgo/network/overlying_grid.py +++ b/edisgo/network/overlying_grid.py @@ -385,6 +385,16 @@ def distribute_overlying_grid_requirements(edisgo_obj): scaling_df_min = ( edisgo_obj.dsm.p_min.transpose() / edisgo_obj.dsm.p_min.sum(axis=1) ) + # in case p_max/p_min of all DSM loads is zero in an hour but there is + # positive/negative DSM from the overlying grid, this is not correctly + # distributed and may lead to large errors in the time series with the + # distributed DSM + # in the following this is corrected by assuming an equal distribution + # during those hours + equal_dist_factor = 1 / len(dsm_loads) + scaling_df_max.fillna(equal_dist_factor, inplace=True) + scaling_df_min.fillna(equal_dist_factor, inplace=True) + edisgo_copy.timeseries._loads_active_power.loc[:, dsm_loads] = ( edisgo_obj.timeseries._loads_active_power.loc[:, 
dsm_loads] + ( diff --git a/edisgo/network/results.py b/edisgo/network/results.py index 8f6e8b04..151fbd48 100755 --- a/edisgo/network/results.py +++ b/edisgo/network/results.py @@ -628,7 +628,7 @@ def _add_line_to_equipment_changes(self, line): "iteration_step": [0], "change": ["added"], "equipment": [line.type_info], - "quantity": [1], + "quantity": [line.num_parallel], }, index=[line.name], ), diff --git a/edisgo/network/timeseries.py b/edisgo/network/timeseries.py index b53856f1..99a1ba32 100644 --- a/edisgo/network/timeseries.py +++ b/edisgo/network/timeseries.py @@ -821,10 +821,10 @@ def _worst_case_generators(self, cases, df, configs): # reactive power # get worst case configurations for each generator power_factor = q_control._fixed_cosphi_default_power_factor( - df, "generators", configs + df, "generator", configs ) q_sign = q_control._fixed_cosphi_default_reactive_power_sign( - df, "generators", configs + df, "generator", configs ) # write reactive power configuration to TimeSeriesRaw self.time_series_raw.q_control.drop(df.index, errors="ignore", inplace=True) @@ -899,10 +899,10 @@ def _worst_case_conventional_load(self, cases, df, configs): # reactive power # get worst case configurations for each load power_factor = q_control._fixed_cosphi_default_power_factor( - df, "conventional_loads", configs + df, "conventional_load", configs ) q_sign = q_control._fixed_cosphi_default_reactive_power_sign( - df, "conventional_loads", configs + df, "conventional_load", configs ) # write reactive power configuration to TimeSeriesRaw self.time_series_raw.q_control.drop(df.index, errors="ignore", inplace=True) @@ -999,10 +999,10 @@ def _worst_case_charging_points(self, cases, df, configs): # reactive power # get worst case configurations for each charging point power_factor = q_control._fixed_cosphi_default_power_factor( - df, "charging_points", configs + df, "charging_point", configs ) q_sign = q_control._fixed_cosphi_default_reactive_power_sign( - df, 
"charging_points", configs + df, "charging_point", configs ) # write reactive power configuration to TimeSeriesRaw self.time_series_raw.q_control.drop(df.index, errors="ignore", inplace=True) @@ -1077,10 +1077,10 @@ def _worst_case_heat_pumps(self, cases, df, configs): # reactive power # get worst case configurations for each heat pump power_factor = q_control._fixed_cosphi_default_power_factor( - df, "heat_pumps", configs + df, "heat_pump", configs ) q_sign = q_control._fixed_cosphi_default_reactive_power_sign( - df, "heat_pumps", configs + df, "heat_pump", configs ) # write reactive power configuration to TimeSeriesRaw self.time_series_raw.q_control.drop(df.index, errors="ignore", inplace=True) @@ -1153,10 +1153,10 @@ def _worst_case_storage_units(self, cases, df, configs): # reactive power # get worst case configurations for each load power_factor = q_control._fixed_cosphi_default_power_factor( - df, "storage_units", configs + df, "storage_unit", configs ) q_sign = q_control._fixed_cosphi_default_reactive_power_sign( - df, "storage_units", configs + df, "storage_unit", configs ) # write reactive power configuration to TimeSeriesRaw self.time_series_raw.q_control.drop(df.index, errors="ignore", inplace=True) @@ -1204,7 +1204,7 @@ def predefined_fluctuating_generators_by_technology( Technology and weather cell specific hourly feed-in time series are obtained from the `OpenEnergy DataBase - `_. See + `_. See :func:`edisgo.io.timeseries_import.feedin_oedb` for more information. This option requires that the parameter `engine` is provided in case @@ -1448,6 +1448,12 @@ def predefined_conventional_loads_by_sector( load_names = self._check_if_components_exist(edisgo_object, load_names, "loads") loads_df = edisgo_object.topology.loads_df.loc[load_names, :] + # check if loads contain annual demand + if not all(loads_df.annual_consumption.notnull()): + raise AttributeError( + "The annual consumption of some loads is missing. 
Please provide" + ) + # scale time series by annual consumption ts_scaled = loads_df.apply( lambda x: ts_loads[x.sector] * x.annual_consumption, @@ -1600,7 +1606,7 @@ def _get_q_sign_and_power_factor_per_component( q_sign, q_control._fixed_cosphi_default_reactive_power_sign( df[df["type"] == load_type], - f"{load_type}s", + load_type, edisgo_object.config, ), ] @@ -1610,17 +1616,17 @@ def _get_q_sign_and_power_factor_per_component( power_factor, q_control._fixed_cosphi_default_power_factor( df[df["type"] == load_type], - f"{load_type}s", + load_type, edisgo_object.config, ), ] ) else: q_sign = q_control._fixed_cosphi_default_reactive_power_sign( - df, type, edisgo_object.config + df, type[:-1], edisgo_object.config ) power_factor = q_control._fixed_cosphi_default_power_factor( - df, type, edisgo_object.config + df, type[:-1], edisgo_object.config ) elif isinstance(parametrisation, pd.DataFrame): # check if all given components exist in network and only use existing @@ -1653,7 +1659,7 @@ def _get_q_sign_and_power_factor_per_component( q_sign, default_func( df[df["type"] == load_type], - f"{load_type}s", + load_type, edisgo_object.config, ), ] @@ -1662,7 +1668,9 @@ def _get_q_sign_and_power_factor_per_component( q_sign = pd.concat( [ q_sign, - default_func(df, type, edisgo_object.config), + default_func( + df, type[:-1], edisgo_object.config + ), ] ) else: @@ -1686,7 +1694,7 @@ def _get_q_sign_and_power_factor_per_component( power_factor, default_func( df[df["type"] == load_type], - f"{load_type}s", + load_type, edisgo_object.config, ), ] @@ -1695,7 +1703,9 @@ def _get_q_sign_and_power_factor_per_component( power_factor = pd.concat( [ power_factor, - default_func(df, type, edisgo_object.config), + default_func( + df, type[:-1], edisgo_object.config + ), ] ) else: diff --git a/edisgo/network/topology.py b/edisgo/network/topology.py index f411bf31..1eb0a927 100755 --- a/edisgo/network/topology.py +++ b/edisgo/network/topology.py @@ -5,6 +5,7 @@ import random import 
warnings +from typing import TYPE_CHECKING from zipfile import ZipFile import networkx as nx @@ -15,7 +16,7 @@ from edisgo.network.components import Switch from edisgo.network.grids import LVGrid, MVGrid -from edisgo.tools import geo, networkx_helper +from edisgo.tools import geo, geopandas_helper, networkx_helper from edisgo.tools.tools import ( calculate_apparent_power, calculate_line_reactance, @@ -30,6 +31,9 @@ from shapely.ops import transform from shapely.wkt import loads as wkt_loads +if TYPE_CHECKING: + from edisgo.tools.geopandas_helper import GeoPandasGridContainer + logger = logging.getLogger(__name__) COLUMNS = { @@ -1928,7 +1932,13 @@ def connect_to_mv(self, edisgo_object, comp_data, comp_type="generator"): # avoid very short lines by limiting line length to at least 1m line_length = max(line_length, 0.001) - line_type, num_parallel = select_cable(edisgo_object, "mv", power) + line_type, num_parallel = select_cable( + edisgo_obj=edisgo_object, + level="mv", + apparent_power=power, + length=line_length, + component_type=comp_type, + ) line_name = self.add_line( bus0=self.mv_grid.station.index[0], @@ -1975,13 +1985,12 @@ def connect_to_mv(self, edisgo_object, comp_data, comp_type="generator"): for dist_min_obj in conn_objects_min_stack: # do not allow connection to virtual busses if "virtual" not in dist_min_obj["repr"]: - line_type, num_parallel = select_cable(edisgo_object, "mv", power) target_obj_result = self._connect_mv_bus_to_target_object( edisgo_object=edisgo_object, bus=self.buses_df.loc[bus, :], target_obj=dist_min_obj, - line_type=line_type.name, - number_parallel_lines=num_parallel, + comp_type=comp_type, + power=power, ) if target_obj_result is not None: @@ -2123,7 +2132,7 @@ def _choose_random_substation_id(): """ if comp_type == "generator": - random.seed(a=comp_data["generator_id"]) + random.seed(a=int(comp_data["generator_id"])) elif comp_type == "storage_unit": random.seed(a=len(self.storage_units_df)) else: @@ -2448,7 +2457,12 @@ def 
connect_to_lv_based_on_geolocation( return comp_name def _connect_mv_bus_to_target_object( - self, edisgo_object, bus, target_obj, line_type, number_parallel_lines + self, + edisgo_object, + bus, + target_obj, + comp_type, + power, ): """ Connects given MV bus to given target object (MV line or bus). @@ -2477,11 +2491,12 @@ def _connect_mv_bus_to_target_object( * shp : :shapely:`Shapely Point object` or \ :shapely:`Shapely Line object` Geometry of line or bus to connect to. - - line_type : str - Line type to use to connect new component with. - number_parallel_lines : int - Number of parallel lines to connect new component with. + comp_type : str + Type of added component. Can be 'generator', 'charging_point', 'heat_pump' + or 'storage_unit'. + Default: 'generator'. + power : float + Nominal power of the new component to be connected. Returns ------- @@ -2598,6 +2613,13 @@ def _connect_mv_bus_to_target_object( "branch_detour_factor" ], ) + line_type, num_parallel = select_cable( + edisgo_obj=edisgo_object, + level="mv", + apparent_power=power, + length=line_length, + component_type=comp_type, + ) # avoid very short lines by limiting line length to at least 1m if line_length < 0.001: line_length = 0.001 @@ -2606,8 +2628,8 @@ def _connect_mv_bus_to_target_object( bus1=bus.name, length=line_length, kind="cable", - type_info=line_type, - num_parallel=number_parallel_lines, + type_info=line_type.name, + num_parallel=num_parallel, ) # add line to equipment changes edisgo_object.results._add_line_to_equipment_changes( @@ -2624,7 +2646,7 @@ def _connect_mv_bus_to_target_object( # bus is the nearest connection point else: - # add new branch for satellite (station to station) + # add new line between new bus and closest bus line_length = geo.calc_geo_dist_vincenty( grid_topology=self, bus_source=bus.name, @@ -2633,6 +2655,13 @@ def _connect_mv_bus_to_target_object( "branch_detour_factor" ], ) + line_type, num_parallel = select_cable( + edisgo_obj=edisgo_object, + level="mv", 
+ apparent_power=power, + length=line_length, + component_type=comp_type, + ) # avoid very short lines by limiting line length to at least 1m if line_length < 0.001: line_length = 0.001 @@ -2642,8 +2671,8 @@ def _connect_mv_bus_to_target_object( bus1=bus.name, length=line_length, kind="cable", - type_info=line_type, - num_parallel=number_parallel_lines, + type_info=line_type.name, + num_parallel=num_parallel, ) # add line to equipment changes @@ -2721,7 +2750,13 @@ def _connect_to_lv_bus(self, edisgo_object, target_bus, comp_type, comp_data): line_length = max(line_length, 0.001) # get suitable line type - line_type, num_parallel = select_cable(edisgo_object, "lv", comp_data["p"]) + line_type, num_parallel = select_cable( + edisgo_obj=edisgo_object, + level="lv", + apparent_power=comp_data["p"], + component_type=comp_type, + length=line_length, + ) line_name = self.add_line( bus0=target_bus, bus1=b, @@ -2756,7 +2791,9 @@ def to_graph(self): self.transformers_df, ) - def to_geopandas(self, mode: str = "mv"): + def to_geopandas( + self, mode: str | None = None, lv_grid_id: int | None = None + ) -> GeoPandasGridContainer: """ Returns components as :geopandas:`GeoDataFrame`\\ s. @@ -2766,23 +2803,29 @@ def to_geopandas(self, mode: str = "mv"): Parameters ---------- mode : str - Return mode. If mode is "mv" the mv components are returned. If mode is "lv" - a generator with a container per lv grid is returned. Default: "mv" + If `mode` is None, GeoDataFrames for the MV grid and underlying LV grids is + returned. If `mode` is "mv", GeoDataFrames for only the MV grid are + returned. If `mode` is "lv", GeoDataFrames for the LV grid specified through + `lv_grid_id` are returned. + Default: None. + lv_grid_id : int + Only needs to be provided in case `mode` is "lv". In that case `lv_grid_id` + gives the LV grid ID as integer of the LV grid for which to return the + geodataframes. 
Returns ------- - :class:`~.tools.geopandas_helper.GeoPandasGridContainer` or \ - list(:class:`~.tools.geopandas_helper.GeoPandasGridContainer`) + :class:`~.tools.geopandas_helper.GeoPandasGridContainer` Data container with GeoDataFrames containing all georeferenced components - within the grid(s). + within the grid. """ - if mode == "mv": + if mode is None: + return geopandas_helper.to_geopandas(self, srid=self.grid_district["srid"]) + elif mode == "mv": return self.mv_grid.geopandas elif mode == "lv": - raise NotImplementedError("LV Grids are not georeferenced yet.") - # for lv_grid in self.mv_grid.lv_grids: - # yield lv_grid.geopandas + return self.get_lv_grid(name=lv_grid_id).geopandas else: raise ValueError(f"{mode} is not valid. See docstring for more info.") @@ -3092,6 +3135,9 @@ def check_integrity(self): f"optimisation." ) + # check for meshed grid + self.find_meshes() + def assign_feeders(self, mode: str = "grid_feeder"): """ Assigns MV or LV feeder to each bus and line, depending on the `mode`. @@ -3160,3 +3206,31 @@ def aggregate_lv_grid_at_station(self, lv_grid_id: int | str) -> None: def __repr__(self): return f"Network topology {self.id}" + + def find_meshes(edisgo_obj) -> list[list[int]] | None: + """ + Find all meshes in the grid. + + Parameters + ---------- + edisgo_obj : EDisGo + EDisGo object. + + Returns + ------- + Optional[List[List[int]]] + List of all meshes in the grid. + Each mesh is represented as a list of node indices. + If no meshes are found, None is returned. + """ + meshes = nx.cycle_basis(edisgo_obj.to_graph()) + if meshes: + logger.warning( + "Grid contains mesh(es). Be aware, that the grid expansion methodology " + "is currently not able to handle meshes. Further, the optimisation of " + "flexibility dispatch is not exact in case of meshed grids, but can " + "still be used." 
+ ) + return meshes + else: + return None diff --git a/edisgo/opf/eDisGo_OPF.jl/src/core/data.jl b/edisgo/opf/eDisGo_OPF.jl/src/core/data.jl index 1063cc1f..f180f831 100644 --- a/edisgo/opf/eDisGo_OPF.jl/src/core/data.jl +++ b/edisgo/opf/eDisGo_OPF.jl/src/core/data.jl @@ -1,7 +1,4 @@ function set_ac_bf_start_values!(network::Dict{String,<:Any}) - for (i,bus) in network["bus"] - bus["w_start"] = bus["w"] - end for (i,gen) in network["gen_nd"] gen["pgc_start"] = gen["pgc"] diff --git a/edisgo/opf/eDisGo_OPF.jl/src/form/bf.jl b/edisgo/opf/eDisGo_OPF.jl/src/form/bf.jl index e1db2d1b..1245fe7b 100644 --- a/edisgo/opf/eDisGo_OPF.jl/src/form/bf.jl +++ b/edisgo/opf/eDisGo_OPF.jl/src/form/bf.jl @@ -118,6 +118,23 @@ function constraint_max_line_loading(pm::AbstractSOCBFModelEdisgo, n::Int) end +function constraint_max_line_loading(pm::AbstractNCBFModelEdisgo, n::Int) + p = PowerModels.var(pm, n, :p) + q = PowerModels.var(pm, n, :q) + ll = PowerModels.var(pm, 1, :ll) + s_nom = Dict(i => get(branch, "rate_a", 1.0) for (i,branch) in PowerModels.ref(pm, n, :branch)) + + for (i,branch) in PowerModels.ref(pm, n, :branch) + f_bus = branch["f_bus"] + t_bus = branch["t_bus"] + f_idx = (i, f_bus, t_bus) + if !(branch["storage"]) + JuMP.@constraint(pm.model, (p[f_idx]^2 + q[f_idx]^2)/s_nom[i]^2 <= ll[f_idx]) + end + end +end + + function constraint_power_balance(pm::AbstractBFModelEdisgo, n::Int, i, bus_gens, bus_gens_nd, bus_gens_slack, bus_loads, bus_arcs_to, bus_arcs_from, bus_lines_to, bus_storage, bus_pg, bus_qg, bus_pg_nd, bus_qg_nd, bus_pd, bus_qd, branch_r, branch_x, bus_dsm, bus_hps, bus_cps, bus_storage_pf, bus_dsm_pf, bus_hps_pf, bus_cps_pf, bus_gen_nd_pf, bus_gen_d_pf, bus_loads_pf, branch_strg_pf) pt = get(PowerModels.var(pm, n), :p, Dict()); PowerModels._check_var_keys(pt, bus_arcs_to, "active power", "branch") qt = get(PowerModels.var(pm, n), :q, Dict()); PowerModels._check_var_keys(qt, bus_arcs_to, "reactive power", "branch") diff --git a/edisgo/opf/powermodels_opf.py 
b/edisgo/opf/powermodels_opf.py index db4925d3..85da160a 100644 --- a/edisgo/opf/powermodels_opf.py +++ b/edisgo/opf/powermodels_opf.py @@ -4,26 +4,29 @@ import subprocess import sys +from typing import Optional + import numpy as np from edisgo.flex_opt import exceptions from edisgo.io.powermodels_io import from_powermodels +from edisgo.network.topology import Topology logger = logging.getLogger(__name__) def pm_optimize( edisgo_obj, - s_base=1, - flexible_cps=None, - flexible_hps=None, - flexible_loads=None, - flexible_storage_units=None, - opf_version=1, - method="soc", - warm_start=False, - silence_moi=False, -): + s_base: int = 1, + flexible_cps: Optional[np.ndarray] = None, + flexible_hps: Optional[np.ndarray] = None, + flexible_loads: Optional[np.ndarray] = None, + flexible_storage_units: Optional[np.ndarray] = None, + opf_version: int = 1, + method: str = "soc", + warm_start: bool = False, + silence_moi: bool = False, +) -> None: """ Run OPF for edisgo object in julia subprocess and write results of OPF to edisgo object. Results of OPF are time series of operation schedules of flexibilities. @@ -105,6 +108,7 @@ def pm_optimize( Default: True. 
""" + Topology.find_meshes(edisgo_obj) opf_dir = os.path.dirname(os.path.abspath(__file__)) solution_dir = os.path.join(opf_dir, "opf_solutions") pm, hv_flex_dict = edisgo_obj.to_powermodels( diff --git a/edisgo/tools/config.py b/edisgo/tools/config.py index 54fc08a3..7494943a 100644 --- a/edisgo/tools/config.py +++ b/edisgo/tools/config.py @@ -116,7 +116,7 @@ class Config: Get reactive power factor for generators in the MV network - >>> config['reactive_power_factor']['mv_gen'] + >>> config['reactive_power_factor']['mv_generator'] """ diff --git a/edisgo/tools/geopandas_helper.py b/edisgo/tools/geopandas_helper.py index 6c48a62e..14066d56 100644 --- a/edisgo/tools/geopandas_helper.py +++ b/edisgo/tools/geopandas_helper.py @@ -11,6 +11,7 @@ if TYPE_CHECKING: from edisgo.network.grids import Grid + from edisgo.network.topology import Topology COMPONENTS: list[str] = [ "generators_df", @@ -162,14 +163,17 @@ def plot(self): raise NotImplementedError -def to_geopandas(grid_obj: Grid): +def to_geopandas(grid_obj: Grid | Topology, srid: int) -> GeoPandasGridContainer: """ - Translates all DataFrames with geolocations within a Grid class to GeoDataFrames. + Translates all DataFrames with geolocations within a grid topology to GeoDataFrames. Parameters ---------- - grid_obj : :class:`~.network.grids.Grid` - Grid object to transform. + grid_obj : :class:`~.network.grids.Grid` or :class:`~.network.topology.Topology` + Grid or Topology object to transform. + srid : int + SRID (spatial reference ID) of x and y coordinates of buses. Usually given in + Topology.grid_district["srid"]. Returns ------- @@ -178,9 +182,6 @@ def to_geopandas(grid_obj: Grid): their geolocation. 
""" - # get srid id - srid = grid_obj._edisgo_obj.topology.grid_district["srid"] - # convert buses_df buses_df = grid_obj.buses_df buses_df = buses_df.assign( @@ -204,25 +205,27 @@ def to_geopandas(grid_obj: Grid): crs=f"EPSG:{srid}", ) if components_dict[component.replace("_df", "_gdf")].empty: - components_dict[component.replace("_df", "_gdf")].index = components_dict[ - component.replace("_df", "_gdf") - ].index.astype(object) + components_dict[component.replace("_df", "_gdf")].index = attr.index # convert lines_df lines_df = grid_obj.lines_df - geom_0 = lines_df.merge( - buses_gdf[["geometry"]], left_on="bus0", right_index=True - ).geometry - geom_1 = lines_df.merge( - buses_gdf[["geometry"]], left_on="bus1", right_index=True - ).geometry - - geometry = [ - LineString([point_0, point_1]) for point_0, point_1 in list(zip(geom_0, geom_1)) - ] - - lines_gdf = gpd.GeoDataFrame(lines_df.assign(geometry=geometry), crs=f"EPSG:{srid}") + lines_gdf = lines_df.merge( + buses_gdf[["geometry", "v_nom"]].rename(columns={"geometry": "geom_0"}), + left_on="bus0", + right_index=True, + ) + lines_gdf = lines_gdf.merge( + buses_gdf[["geometry"]].rename(columns={"geometry": "geom_1"}), + left_on="bus1", + right_index=True, + ) + lines_gdf["geometry"] = lines_gdf.apply( + lambda _: LineString([_["geom_0"], _["geom_1"]]), axis=1 + ) + lines_gdf = gpd.GeoDataFrame( + lines_gdf.drop(columns=["geom_0", "geom_1"]), crs=f"EPSG:{srid}" + ) return GeoPandasGridContainer( crs=f"EPSG:{srid}", diff --git a/edisgo/tools/logger.py b/edisgo/tools/logger.py index 5b1a62a5..20514f94 100644 --- a/edisgo/tools/logger.py +++ b/edisgo/tools/logger.py @@ -119,7 +119,7 @@ def setup_logger( def create_dir(dir_path): if not os.path.isdir(dir_path): - os.mkdir(dir_path) + os.makedirs(dir_path) def get_default_root_dir(): dir_path = str(cfg_edisgo.get("user_dirs", "root_dir")) @@ -140,9 +140,9 @@ def create_home_dir(): log_dir = os.path.join( get_default_root_dir(), cfg_edisgo.get("user_dirs", "log_dir") ) 
- create_dir(log_dir) if log_dir is not None: + create_dir(log_dir) file_name = os.path.join(log_dir, file_name) if reset_loggers: diff --git a/edisgo/tools/plots.py b/edisgo/tools/plots.py index 381c7375..3a120778 100644 --- a/edisgo/tools/plots.py +++ b/edisgo/tools/plots.py @@ -6,7 +6,6 @@ from typing import TYPE_CHECKING import matplotlib -import matplotlib.cm as cm import numpy as np import pandas as pd import plotly.graph_objects as go @@ -604,7 +603,7 @@ def nodes_by_costs(buses, grid_expansion_costs, edisgo_obj): bus_sizes, bus_colors = nodes_by_costs( pypsa_plot.buses.index, grid_expansion_costs, edisgo_obj ) - bus_cmap = plt.cm.get_cmap(lines_cmap) + bus_cmap = matplotlib.pyplot.colormaps.get_cmap(lines_cmap) elif node_color == "curtailment": bus_sizes = nodes_curtailment(pypsa_plot.buses.index, curtailment_df) bus_colors = "orangered" @@ -681,7 +680,8 @@ def nodes_by_costs(buses, grid_expansion_costs, edisgo_obj): line_width = pypsa_plot.lines.s_nom * scaling_factor_line_width else: line_width = 2 - cmap = plt.cm.get_cmap(lines_cmap) + cmap = matplotlib.pyplot.colormaps.get_cmap(lines_cmap) + ll = pypsa_plot.plot( line_colors=line_colors, line_cmap=cmap, @@ -888,7 +888,7 @@ def color_map_color( """ norm = matplotlib.colors.Normalize(vmin=vmin, vmax=vmax) if isinstance(cmap_name, str): - cmap = cm.get_cmap(cmap_name) + cmap = matplotlib.pyplot.colormaps.get_cmap(cmap_name) else: cmap = matplotlib.colors.LinearSegmentedColormap.from_list("mycmap", cmap_name) rgb = cmap(norm(abs(value)))[:3] diff --git a/edisgo/tools/temporal_complexity_reduction.py b/edisgo/tools/temporal_complexity_reduction.py index 5dfc0090..973d23b3 100644 --- a/edisgo/tools/temporal_complexity_reduction.py +++ b/edisgo/tools/temporal_complexity_reduction.py @@ -3,7 +3,6 @@ import logging import os -from copy import deepcopy from typing import TYPE_CHECKING import numpy as np @@ -18,55 +17,87 @@ logger = logging.getLogger(__name__) -def _scored_most_critical_loading(edisgo_obj: EDisGo) 
-> pd.Series: +def _scored_most_critical_loading( + edisgo_obj: EDisGo, weight_by_costs: bool = True +) -> pd.Series: """ - Method to get time steps where at least one component shows its highest overloading. + Get time steps sorted by severity of overloadings. + + The overloading can be weighted by the estimated expansion costs of each respective + line and transformer. See parameter `weight_by_costs` for more information. Parameters ----------- edisgo_obj : :class:`~.EDisGo` + weight_by_costs : bool + Defines whether overloading issues should be weighted by estimated grid + expansion costs or not. See parameter `weight_by_costs` in + :func:`~get_most_critical_time_steps` for more information. + Default: True. Returns -------- :pandas:`pandas.Series` Series with time index and corresponding sum of maximum relative overloadings - of lines and transformers. The series only contains time steps, where at least - one component is maximally overloaded, and is sorted descending by the - sum of maximum relative overloadings. + of lines and transformers (weighted by estimated reinforcement costs, in case + `weight_by_costs` is True). The series only contains time steps, where at least + one component is maximally overloaded, and is sorted descending order. """ - + # ToDo The relative loading is used in this function to determine most critical + # time steps. While this makes sense to determine which lines are overloaded, it + # is not the best indicator for the weighting as it does not convey the number + # of additional lines needed to solve a problem. For that the number of parallel + # standard lines and transformers needed would be better. However, for now + # using the relative overloading as an estimation is okay. 
# Get current relative to allowed current relative_i_res = check_tech_constraints.components_relative_load(edisgo_obj) # Get lines that have violations crit_lines_score = relative_i_res[relative_i_res > 1] - # Get most critical timesteps per component + # Get most critical time steps per component + crit_lines_score = crit_lines_score[crit_lines_score == crit_lines_score.max()] + + if weight_by_costs: + # weight line violations with expansion costs + costs = _costs_per_line_and_transformer(edisgo_obj) + crit_lines_score = crit_lines_score * costs.loc[crit_lines_score.columns] + else: + crit_lines_score = crit_lines_score - 1 + + # drop components and time steps without violations crit_lines_score = ( - (crit_lines_score[crit_lines_score == crit_lines_score.max()]) - .dropna(how="all") - .dropna(how="all", axis=1) + crit_lines_score.dropna(how="all").dropna(how="all", axis=1).fillna(0) ) - - # Sort according to highest cumulated relative overloading - crit_lines_score = (crit_lines_score - 1).sum(axis=1) - return crit_lines_score.sort_values(ascending=False) + # sort sum in descending order + return crit_lines_score.sum(axis=1).sort_values(ascending=False) -def _scored_most_critical_voltage_issues(edisgo_obj: EDisGo) -> pd.Series: +def _scored_most_critical_voltage_issues( + edisgo_obj: EDisGo, weight_by_costs: bool = True +) -> pd.Series: """ Method to get time steps where at least one bus shows its highest deviation from allowed voltage boundaries. + The voltage issues can be weighted by the estimated expansion costs in each + respective feeder. See parameter `weight_by_costs` for more information. + Parameters ----------- edisgo_obj : :class:`~.EDisGo` + weight_by_costs : bool + Defines whether voltage issues should be weighted by estimated grid expansion + costs or not. See parameter `weight_by_costs` in + :func:`~get_most_critical_time_steps` for more information. + Default: True. 
Returns -------- :pandas:`pandas.Series` - Series with time index and corresponding sum of maximum voltage deviations. + Series with time index and corresponding sum of maximum voltage deviations + (weighted by estimated reinforcement costs, in case `weight_by_costs` is True). The series only contains time steps, where at least one bus has its highest deviation from the allowed voltage limits, and is sorted descending by the sum of maximum voltage deviations. @@ -77,18 +108,42 @@ def _scored_most_critical_voltage_issues(edisgo_obj: EDisGo) -> pd.Series: ) # Get score for nodes that are over or under the allowed deviations - voltage_diff = voltage_diff.abs()[voltage_diff.abs() > 0] + voltage_diff = voltage_diff[voltage_diff != 0.0].abs() # get only most critical events for component # Todo: should there be different ones for over and undervoltage? - voltage_diff = ( - (voltage_diff[voltage_diff.abs() == voltage_diff.abs().max()]) - .dropna(how="all") - .dropna(how="all", axis=1) - ) - - voltage_diff = voltage_diff.sum(axis=1) + voltage_diff = voltage_diff[voltage_diff == voltage_diff.max()] + + if weight_by_costs: + # set feeder using MV feeder for MV components and LV feeder for LV components + edisgo_obj.topology.assign_feeders(mode="grid_feeder") + # feeders of buses at MV/LV station's secondary sides are set to the name of the + # station bus to have them as separate feeders + lv_station_buses = [ + lv_grid.station.index[0] for lv_grid in edisgo_obj.topology.mv_grid.lv_grids + ] + edisgo_obj.topology.buses_df.loc[ + lv_station_buses, "grid_feeder" + ] = lv_station_buses + # weight voltage violations with expansion costs + costs = _costs_per_feeder(edisgo_obj, lv_station_buses=lv_station_buses) + # map feeder costs to buses + feeder_buses = edisgo_obj.topology.buses_df.grid_feeder + costs_buses = pd.Series( + { + bus_name: ( + costs[feeder_buses[bus_name]] + if feeder_buses[bus_name] != "station_node" + else 0 + ) + for bus_name in feeder_buses.index + } + ) + 
voltage_diff = voltage_diff * costs_buses.loc[voltage_diff.columns] - return voltage_diff.sort_values(ascending=False) + # drop components and time steps without violations + voltage_diff = voltage_diff.dropna(how="all").dropna(how="all", axis=1).fillna(0) + # sort sum in descending order + return voltage_diff.sum(axis=1).sort_values(ascending=False) def _scored_most_critical_loading_time_interval( @@ -97,12 +152,13 @@ def _scored_most_critical_loading_time_interval( time_steps_per_day=24, time_step_day_start=0, overloading_factor=0.95, + weight_by_costs=True, ): """ Get time intervals sorted by severity of overloadings. - The overloading is weighed by the estimated expansion costs of each respective line - and transformer. + The overloading can be weighted by the estimated expansion costs of each respective + line and transformer. See parameter `weight_by_costs` for more information. The length of the time intervals and hour of day at which the time intervals should begin can be set through the parameters `time_steps_per_time_interval` and `time_step_day_start`. @@ -115,24 +171,29 @@ def _scored_most_critical_loading_time_interval( The eDisGo API object time_steps_per_time_interval : int Amount of continuous time steps in an interval that violation is determined for. - Currently, these can only be multiples of 24. + See parameter `time_steps_per_time_interval` in + :func:`~get_most_critical_time_intervals` for more information. Default: 168. time_steps_per_day : int - Number of time steps in one day. In case of an hourly resolution this is 24. - As currently only an hourly resolution is possible, this value should always be - 24. + Number of time steps in one day. See parameter `time_steps_per_day` in + :func:`~get_most_critical_time_intervals` for more information. Default: 24. time_step_day_start : int - Time step of the day at which each interval should start. If you want it to - start at midnight, this should be set to 0. Default: 0. 
+ Time step of the day at which each interval should start. See parameter + `time_step_day_start` in :func:`~get_most_critical_time_intervals` for more + information. + Default: 0. overloading_factor : float Factor at which an overloading of a component is considered to be close enough - to the highest overloading of that component. This is used to determine the - number of components that reach their highest overloading in each time interval. - Per default, it is set to 0.95, which means that if the highest overloading of - a component is 2, it will be considered maximally overloaded at an overloading - of higher or equal to 2*0.95. + to the highest overloading of that component. See parameter + `overloading_factor` in :func:`~get_most_critical_time_intervals` for more + information. Default: 0.95. + weight_by_costs : bool + Defines whether overloading issues should be weighted by estimated grid + expansion costs or not. See parameter `weight_by_costs` in + :func:`~get_most_critical_time_intervals` for more information. + Default: True. 
Returns -------- @@ -153,70 +214,22 @@ def _scored_most_critical_loading_time_interval( # Get lines that have violations and replace nan values with 0 crit_lines_score = relative_i_res[relative_i_res > 1].fillna(0) - # weight line violations with expansion costs - costs_lines = ( - line_expansion_costs(edisgo_obj).drop(columns="voltage_level").sum(axis=1) - ) - costs_trafos_lv = pd.Series( - index=[ - str(lv_grid) + "_station" - for lv_grid in list(edisgo_obj.topology.mv_grid.lv_grids) - ], - data=edisgo_obj.config["costs_transformers"]["lv"], - ) - costs_trafos_mv = pd.Series( - index=["MVGrid_" + str(edisgo_obj.topology.id) + "_station"], - data=edisgo_obj.config["costs_transformers"]["mv"], - ) - costs = pd.concat([costs_lines, costs_trafos_lv, costs_trafos_mv]) - crit_lines_cost = crit_lines_score * costs - - # Get highest overloading in each window for each component and sum it up - crit_timesteps = ( - crit_lines_cost.rolling( - window=int(time_steps_per_time_interval), closed="right" - ) - .max() - .sum(axis=1) - ) - # select each nth time window to only consider windows starting at a certain time - # of day and sort time intervals in descending order - # ToDo: To make function work for frequencies other than hourly, the following - # needs to be adapted to index based on time index instead of iloc - crit_timesteps = ( - crit_timesteps.iloc[int(time_steps_per_time_interval) - 1 :] - .iloc[time_step_day_start + 1 :: time_steps_per_day] - .sort_values(ascending=False) - ) - # move time index as rolling gives the end of the time interval, but we want the - # beginning - timesteps = crit_timesteps.index - pd.DateOffset( - hours=int(time_steps_per_time_interval) - ) - time_intervals = [ - pd.date_range( - start=timestep, periods=int(time_steps_per_time_interval), freq="h" - ) - for timestep in timesteps - ] - - # make dataframe with time steps in each time interval and the percentage of - # components that reach their maximum overloading - time_intervals_df = 
pd.DataFrame( - index=range(len(time_intervals)), - columns=["time_steps", "percentage_max_overloaded_components"], + if weight_by_costs: + # weight line violations with expansion costs + costs = _costs_per_line_and_transformer(edisgo_obj) + crit_lines_weighted = crit_lines_score * costs + else: + crit_lines_weighted = crit_lines_score.copy() + + time_intervals_df = _most_critical_time_interval( + costs_per_time_step=crit_lines_weighted, + grid_issues_magnitude_df=crit_lines_score, + which="overloading", + deviation_factor=overloading_factor, + time_steps_per_time_interval=time_steps_per_time_interval, + time_steps_per_day=time_steps_per_day, + time_step_day_start=time_step_day_start, ) - time_intervals_df["time_steps"] = time_intervals - lines_no_max = crit_lines_score.columns.values - total_lines = len(lines_no_max) - max_per_line = crit_lines_score.max() - for i in range(len(time_intervals)): - # check if worst overloading of every line is included in time interval - max_per_line_ti = crit_lines_score.loc[time_intervals[i]].max() - time_intervals_df["percentage_max_overloaded_components"][i] = ( - len(max_per_line_ti[max_per_line_ti >= max_per_line * overloading_factor]) - / total_lines - ) return time_intervals_df @@ -227,12 +240,13 @@ def _scored_most_critical_voltage_issues_time_interval( time_steps_per_day=24, time_step_day_start=0, voltage_deviation_factor=0.95, + weight_by_costs=True, ): """ Get time intervals sorted by severity of voltage issues. - The voltage issues are weighed by the estimated expansion costs in each respective - feeder. + The voltage issues can be weighted by the estimated expansion costs in each + respective feeder. See parameter `weight_by_costs` for more information. The length of the time intervals and hour of day at which the time intervals should begin can be set through the parameters `time_steps_per_time_interval` and `time_step_day_start`. 
@@ -245,25 +259,29 @@ def _scored_most_critical_voltage_issues_time_interval( The eDisGo API object time_steps_per_time_interval : int Amount of continuous time steps in an interval that violation is determined for. - Currently, these can only be multiples of 24. + See parameter `time_steps_per_time_interval` in + :func:`~get_most_critical_time_intervals` for more information. Default: 168. time_steps_per_day : int - Number of time steps in one day. In case of an hourly resolution this is 24. - As currently only an hourly resolution is possible, this value should always be - 24. + Number of time steps in one day. See parameter `time_steps_per_day` in + :func:`~get_most_critical_time_intervals` for more information. Default: 24. time_step_day_start : int - Time step of the day at which each interval should start. If you want it to - start at midnight, this should be set to 0. Default: 0. + Time step of the day at which each interval should start. See parameter + `time_step_day_start` in :func:`~get_most_critical_time_intervals` for more + information. + Default: 0. voltage_deviation_factor : float Factor at which a voltage deviation at a bus is considered to be close enough - to the highest voltage deviation at that bus. This is used to determine the - number of buses that reach their highest voltage deviation in each time - interval. Per default, it is set to 0.95. This means that if the highest voltage - deviation at a bus is 0.2, it will be included in the determination of number - of buses that reach their maximum voltage deviation in a certain time interval - at a voltage deviation of higher or equal to 0.2*0.95. + to the highest voltage deviation at that bus. See parameter + `voltage_deviation_factor` in :func:`~get_most_critical_time_intervals` for more + information. Default: 0.95. + weight_by_costs : bool + Defines whether voltage issues should be weighted by estimated grid expansion + costs or not. 
See parameter `weight_by_costs` in + :func:`~get_most_critical_time_intervals` for more information. + Default: True. Returns -------- @@ -280,31 +298,122 @@ def _scored_most_critical_voltage_issues_time_interval( """ - # Get voltage deviation from allowed voltage limits + # get voltage deviation from allowed voltage limits voltage_diff = check_tech_constraints.voltage_deviation_from_allowed_voltage_limits( edisgo_obj ) - voltage_diff = voltage_diff.abs()[voltage_diff.abs() > 0] + voltage_diff = voltage_diff[voltage_diff != 0.0].abs().fillna(0) - # determine costs per feeder + # set feeder using MV feeder for MV components and LV feeder for LV components + edisgo_obj.topology.assign_feeders(mode="grid_feeder") + # feeders of buses at MV/LV station's secondary sides are set to the name of the + # station bus to have them as separate feeders lv_station_buses = [ lv_grid.station.index[0] for lv_grid in edisgo_obj.topology.mv_grid.lv_grids ] + edisgo_obj.topology.buses_df.loc[lv_station_buses, "grid_feeder"] = lv_station_buses + + # check for every feeder if any of the buses within violate the allowed voltage + # deviation, by grouping voltage_diff per feeder + feeder_buses = edisgo_obj.topology.buses_df.grid_feeder + columns = [feeder_buses.loc[col] for col in voltage_diff.columns] + voltage_diff_feeder = voltage_diff.copy() + voltage_diff_feeder.columns = columns + voltage_diff_feeder = ( + voltage_diff.transpose().reset_index().groupby(by="Bus").max().transpose() + ) + + if weight_by_costs: + # get costs per feeder + costs_per_feeder = _costs_per_feeder(edisgo_obj, lv_station_buses) + # weight feeder voltage violation with costs per feeder + voltage_diff_feeder = voltage_diff_feeder * costs_per_feeder + + time_intervals_df = _most_critical_time_interval( + costs_per_time_step=voltage_diff_feeder, + grid_issues_magnitude_df=voltage_diff, + which="voltage", + deviation_factor=voltage_deviation_factor, + time_steps_per_time_interval=time_steps_per_time_interval, + 
time_steps_per_day=time_steps_per_day, + time_step_day_start=time_step_day_start, + ) + + return time_intervals_df + + +def _costs_per_line_and_transformer(edisgo_obj): + """ + Helper function to get costs per line (including earthwork and costs for one new + line) and per transformer. + + Transformers are named after the grid at the lower voltage level and with the + expansion "_station", e.g. "LVGrid_0_station". + + Returns + ------- + :pandas:`pandas.Series` + Series with component name in index and costs in kEUR as values. + + """ costs_lines = ( line_expansion_costs(edisgo_obj).drop(columns="voltage_level").sum(axis=1) ) costs_trafos_lv = pd.Series( - index=lv_station_buses, - data=edisgo_obj.config._data["costs_transformers"]["lv"], + index=[ + str(lv_grid) + "_station" + for lv_grid in list(edisgo_obj.topology.mv_grid.lv_grids) + ], + data=edisgo_obj.config["costs_transformers"]["lv"], ) - costs = pd.concat([costs_lines, costs_trafos_lv]) + costs_trafos_mv = pd.Series( + index=["MVGrid_" + str(edisgo_obj.topology.id) + "_station"], + data=edisgo_obj.config["costs_transformers"]["mv"], + ) + return pd.concat([costs_lines, costs_trafos_lv, costs_trafos_mv]) + + +def _costs_per_feeder(edisgo_obj, lv_station_buses=None): + """ + Helper function to get costs per MV and LV feeder (including earthwork and costs for + one new line) and per MV/LV transformer (as they are considered as feeders). + + Transformers are named after the bus at the MV/LV station's secondary side. + + Parameters + ----------- + edisgo_obj : :class:`~.EDisGo` + lv_station_buses : list(str) or None + List of bus names of buses at the secondary side of the MV/LV transformers. + If None, list is generated. + + Returns + ------- + :pandas:`pandas.Series` + Series with feeder names in index and costs in kEUR as values. 
+ + """ + if lv_station_buses is None: + lv_station_buses = [ + lv_grid.station.index[0] for lv_grid in edisgo_obj.topology.mv_grid.lv_grids + ] + if "grid_feeder" not in edisgo_obj.topology.buses_df.columns: + # set feeder using MV feeder for MV components and LV feeder for LV components + edisgo_obj.topology.assign_feeders(mode="grid_feeder") - # set feeder using MV feeder for MV components and LV feeder for LV components - edisgo_obj.topology.assign_feeders(mode="grid_feeder") # feeders of buses at MV/LV station's secondary sides are set to the name of the # station bus to have them as separate feeders edisgo_obj.topology.buses_df.loc[lv_station_buses, "grid_feeder"] = lv_station_buses + costs_lines = ( + line_expansion_costs(edisgo_obj).drop(columns="voltage_level").sum(axis=1) + ) + costs_trafos_lv = pd.Series( + index=lv_station_buses, + data=edisgo_obj.config._data["costs_transformers"]["lv"], + ) + costs = pd.concat([costs_lines, costs_trafos_lv]) + feeder_lines = edisgo_obj.topology.lines_df.grid_feeder feeder_trafos_lv = pd.Series( index=lv_station_buses, @@ -317,23 +426,82 @@ def _scored_most_critical_voltage_issues_time_interval( .sum() ) - # check for every feeder if any of the buses within violate the allowed voltage - # deviation, by grouping voltage_diff per feeder - feeder_buses = edisgo_obj.topology.buses_df.grid_feeder - columns = [feeder_buses.loc[col] for col in voltage_diff.columns] - voltage_diff_copy = deepcopy(voltage_diff).fillna(0) - voltage_diff.columns = columns - voltage_diff_feeder = ( - voltage_diff.transpose().reset_index().groupby(by="index").sum().transpose() - ) - voltage_diff_feeder[voltage_diff_feeder != 0] = 1 + return costs_per_feeder.squeeze() + - # weigh feeder voltage violation with costs per feeder - voltage_diff_feeder = voltage_diff_feeder * costs_per_feeder.squeeze() +def _most_critical_time_interval( + costs_per_time_step, + grid_issues_magnitude_df, + which, + deviation_factor=0.95, + 
time_steps_per_time_interval=168, + time_steps_per_day=24, + time_step_day_start=0, +): + """ + Helper function used in functions + :func:`~_scored_most_critical_loading_time_interval` and + :func:`~_scored_most_critical_voltage_issues_time_interval` + to get time intervals sorted by severity of grid issue. + + This function currently only works for an hourly resolution! - # Get the highest voltage issues in each window for each feeder and sum it up + Parameters + ----------- + costs_per_time_step : :pandas:`pandas.DataFrame` + Dataframe containing the estimated grid expansion costs per line or feeder. + Columns contain line or feeder names. + Index of the dataframe are all time steps power flow analysis + was conducted for of type :pandas:`pandas.Timestamp`. + grid_issues_magnitude_df : :pandas:`pandas.DataFrame` + Dataframe containing the relative overloading or voltage deviation per time + step in case of an overloading or voltage issue in that time step. + Columns contain line or bus names. + Index of the dataframe are all time steps power flow analysis + was conducted for of type :pandas:`pandas.Timestamp`. + which : str + Defines whether function is used to determine most critical time intervals for + voltage or overloading problems. Can either be "voltage" or "overloading". + deviation_factor : float + Factor at which a grid issue is considered to be close enough to the highest + grid issue. In case parameter `which` is "voltage", see parameter + `voltage_deviation_factor` in :func:`~get_most_critical_time_intervals` for more + information. In case parameter `which` is "overloading", see parameter + `overloading_factor` in :func:`~get_most_critical_time_intervals` for more + information. + Default: 0.95. + time_steps_per_time_interval : int + Amount of continuous time steps in an interval that violation is determined for. + See parameter `time_steps_per_time_interval` in + :func:`~get_most_critical_time_intervals` for more information. + Default: 168. 
+ time_steps_per_day : int + Number of time steps in one day. See parameter `time_steps_per_day` in + :func:`~get_most_critical_time_intervals` for more information. + Default: 24. + time_step_day_start : int + Time step of the day at which each interval should start. See parameter + `time_step_day_start` in :func:`~get_most_critical_time_intervals` for more + information. + Default: 0. + + Returns + -------- + :pandas:`pandas.DataFrame` + Contains time intervals in which grid expansion needs due to voltage issues + are detected. The time intervals are sorted descending + by the expected cumulated grid expansion costs, so that the time interval with + the highest expected costs corresponds to index 0. The time steps in the + respective time interval are given in column "time_steps" and the share + of buses for which the maximum voltage deviation is reached during the time + interval is given in column "percentage_buses_max_voltage_deviation". Each bus + is only considered once. That means if its maximum voltage deviation was + already considered in an earlier time interval, it is not considered again. 
+ + """ + # get the highest issues in each window for each feeder and sum it up crit_timesteps = ( - voltage_diff_feeder.rolling( + costs_per_time_step.rolling( window=int(time_steps_per_time_interval), closed="right" ) .max() @@ -345,44 +513,51 @@ def _scored_most_critical_voltage_issues_time_interval( # needs to be adapted to index based on time index instead of iloc crit_timesteps = ( crit_timesteps.iloc[int(time_steps_per_time_interval) - 1 :] - .iloc[time_step_day_start + 1 :: time_steps_per_day] + .iloc[time_step_day_start::time_steps_per_day] .sort_values(ascending=False) ) - timesteps = crit_timesteps.index - pd.DateOffset( - hours=int(time_steps_per_time_interval) - ) + # get time steps in each time interval - these are set up setting the given time + # step to be the end of the respective time interval, as rolling() function gives + # the time step at the end of the considered time interval; further, only time + # intervals with a sum greater than zero are considered, as zero values mean, that + # there is no grid issue in the respective time interval time_intervals = [ - pd.date_range( - start=timestep, periods=int(time_steps_per_time_interval), freq="h" - ) - for timestep in timesteps + pd.date_range(end=timestep, periods=int(time_steps_per_time_interval), freq="h") + for timestep in crit_timesteps.index + if crit_timesteps[timestep] != 0.0 ] # make dataframe with time steps in each time interval and the percentage of - # buses that reach their maximum voltage deviation + # buses/branches that reach their maximum voltage deviation / overloading + if which == "voltage": + percentage = "percentage_buses_max_voltage_deviation" + else: + percentage = "percentage_max_overloaded_components" time_intervals_df = pd.DataFrame( index=range(len(time_intervals)), - columns=["time_steps", "percentage_buses_max_voltage_deviation"], + columns=["time_steps", percentage], ) time_intervals_df["time_steps"] = time_intervals - max_per_bus = 
voltage_diff_copy.max().fillna(0) - buses_no_max = max_per_bus.index.values - total_buses = len(buses_no_max) + max_per_bus = grid_issues_magnitude_df.max().fillna(0) + total_buses = len(grid_issues_magnitude_df.columns) for i in range(len(time_intervals)): # check if worst voltage deviation of every bus is included in time interval - max_per_bus_ti = voltage_diff_copy.loc[time_intervals[i]].max() - time_intervals_df["percentage_buses_max_voltage_deviation"][i] = ( - len( - max_per_bus_ti[max_per_bus_ti >= max_per_bus * voltage_deviation_factor] - ) + max_per_bus_ti = grid_issues_magnitude_df.loc[time_intervals[i]].max() + time_intervals_df[percentage][i] = ( + len(max_per_bus_ti[max_per_bus_ti >= max_per_bus * deviation_factor]) / total_buses ) - return time_intervals_df -def _troubleshooting_mode(edisgo_obj): +def _troubleshooting_mode( + edisgo_obj, + mode=None, + timesteps=None, + lv_grid_id=None, + scale_timeseries=None, +): """ Handles non-convergence issues in power flow by iteratively reducing load and feed-in until the power flow converges. @@ -390,10 +565,36 @@ def _troubleshooting_mode(edisgo_obj): Load and feed-in is reduced in steps of 10% down to 20% of the original load and feed-in. The most critical time intervals / time steps can then be determined based on the power flow results with the reduced load and feed-in. + + Parameters + ----------- + edisgo_obj : :class:`~.EDisGo` + The eDisGo API object + mode : str or None + Allows to toggle between power flow analysis for the whole network or just + the MV or one LV grid. See parameter `mode` in function + :attr:`~.EDisGo.analyze` for more information. + timesteps : :pandas:`pandas.DatetimeIndex` or \ + :pandas:`pandas.Timestamp` + Timesteps specifies from which time steps to select most critical ones. It + defaults to None in which case all time steps in + :attr:`~.network.timeseries.TimeSeries.timeindex` are used. + lv_grid_id : int or str + ID (e.g. 1) or name (string representation, e.g. 
"LVGrid_1") of LV grid + to analyze in case mode is 'lv'. Default: None. + scale_timeseries : float or None + See parameter `scale_timeseries` in function :attr:`~.EDisGo.analyze` for more + information. + """ try: logger.debug("Running initial power flow for temporal complexity reduction.") - edisgo_obj.analyze() + edisgo_obj.analyze( + mode=mode, + timesteps=timesteps, + lv_grid_id=lv_grid_id, + scale_timeseries=scale_timeseries, + ) # Exception is used, as non-convergence can also lead to RuntimeError, not only # ValueError except Exception: @@ -404,12 +605,17 @@ def _troubleshooting_mode(edisgo_obj): "not all time steps converged. Power flow is run again with reduced " "network load." ) - for fraction in np.arange(0.8, 0.0, step=-0.1): + if isinstance(scale_timeseries, float): + iter_start = scale_timeseries - 0.1 + else: + iter_start = 0.8 + for fraction in np.arange(iter_start, 0.0, step=-0.1): try: edisgo_obj.analyze( - troubleshooting_mode="iteration", - range_start=fraction, - range_num=1, + mode=mode, + timesteps=timesteps, + lv_grid_id=lv_grid_id, + scale_timeseries=fraction, ) logger.info( f"Power flow fully converged for a reduction factor " @@ -442,11 +648,12 @@ def get_most_critical_time_intervals( use_troubleshooting_mode=True, overloading_factor=0.95, voltage_deviation_factor=0.95, + weight_by_costs=True, ): """ Get time intervals sorted by severity of overloadings as well as voltage issues. - The overloading and voltage issues are weighed by the estimated expansion costs + The overloading and voltage issues can be weighted by the estimated expansion costs solving the issue would require. The length of the time intervals and hour of day at which the time intervals should begin can be set through the parameters `time_steps_per_time_interval` and @@ -503,6 +710,33 @@ def get_most_critical_time_intervals( of buses that reach their maximum voltage deviation in a certain time interval at a voltage deviation of higher or equal to 0.2*0.95. Default: 0.95. 
+ weight_by_costs : bool + Defines whether overloading and voltage issues should be weighted by estimated + grid expansion costs or not. This can be done in order to take into account that + some grid issues are more relevant, as reinforcing a certain line or feeder will + be more expensive than another one. + + In case of voltage issues: + If True, the costs for each MV and LV feeder, as well as MV/LV station are + determined using the costs for earth work and new lines over the full length of + the feeder respectively for a new MV/LV station. In each time interval, the + estimated costs are only taken into account, in case there is a voltage issue + somewhere in the feeder. + The costs don't convey the actual costs but are an estimation, as + the real number of parallel lines needed is not determined and the whole feeder + length is used instead of the length over two-thirds of the feeder. + If False, only the maximum voltage deviation in the feeder is used to determine + the most relevant time intervals. + + In case of overloading issues: + If True, the overloading of each line is multiplied by the respective grid + expansion costs of that line including costs for earth work and one new line. + The costs don't convey the actual costs but are an estimation, as + the discrete number of needed parallel lines is not considered. + If False, only the relative overloading is used to determine the most relevant + time intervals. + + Default: True. 
Returns -------- @@ -544,6 +778,7 @@ def get_most_critical_time_intervals( time_steps_per_time_interval, time_step_day_start=time_step_day_start, overloading_factor=overloading_factor, + weight_by_costs=weight_by_costs, ) if num_time_intervals is None: num_time_intervals = int(np.ceil(len(loading_scores) * percentage)) @@ -564,6 +799,7 @@ def get_most_critical_time_intervals( time_steps_per_time_interval, time_step_day_start=time_step_day_start, voltage_deviation_factor=voltage_deviation_factor, + weight_by_costs=weight_by_costs, ) if num_time_intervals is None: num_time_intervals = int(np.ceil(len(voltage_scores) * percentage)) @@ -601,10 +837,16 @@ def get_most_critical_time_intervals( def get_most_critical_time_steps( edisgo_obj: EDisGo, + mode=None, + timesteps=None, + lv_grid_id=None, + scale_timeseries=None, num_steps_loading=None, num_steps_voltage=None, percentage: float = 1.0, use_troubleshooting_mode=True, + run_initial_analyze=True, + weight_by_costs=True, ) -> pd.DatetimeIndex: """ Get the time steps with the most critical overloading and voltage issues. @@ -613,6 +855,21 @@ def get_most_critical_time_steps( ----------- edisgo_obj : :class:`~.EDisGo` The eDisGo API object + mode : str or None + Allows to toggle between power flow analysis for the whole network or just + the MV or one LV grid. See parameter `mode` in function + :attr:`~.EDisGo.analyze` for more information. + timesteps : :pandas:`pandas.DatetimeIndex` or \ + :pandas:`pandas.Timestamp` + Timesteps specifies from which time steps to select most critical ones. It + defaults to None in which case all time steps in + :attr:`~.network.timeseries.TimeSeries.timeindex` are used. + lv_grid_id : int or str + ID (e.g. 1) or name (string representation, e.g. "LVGrid_1") of LV grid + to analyze in case mode is 'lv'. Default: None. + scale_timeseries : float or None + See parameter `scale_timeseries` in function :attr:`~.EDisGo.analyze` for more + information. 
num_steps_loading : int The number of most critical overloading events to select. If None, `percentage` is used. Default: None. @@ -630,6 +887,35 @@ def get_most_critical_time_steps( are then determined based on the power flow results with the reduced load and feed-in. If False, an error will be raised in case time steps do not converge. Default: True. + run_initial_analyze : bool + This parameter can be used to specify whether to run an initial analyze to + determine most critical time steps or to use existing results. If set to False, + `use_troubleshooting_mode` is ignored. Default: True. + weight_by_costs : bool + Defines whether overloading and voltage issues should be weighted by estimated + grid expansion costs or not. This can be done in order to take into account that + some grid issues are more relevant, as reinforcing a certain line or feeder will + be more expensive than another one. + + In case of voltage issues: + If True, the voltage issues at each bus are weighted by the estimated grid + expansion costs for the MV or LV feeder the bus is in or in case of MV/LV + stations by the costs for a new transformer. Feeder costs are determined using + the costs for earth work and new lines over the full length of the feeder. + The costs don't convey the actual costs but are an estimation, as + the real number of parallel lines needed is not determined and the whole feeder + length is used instead of the length over two-thirds of the feeder. + If False, the severity of each feeder's voltage issue is set to be the same. + + In case of overloading issues: + If True, the overloading of each line is multiplied by + the respective grid expansion costs of that line including costs for earth work + and one new line. + The costs don't convey the actual costs but are an estimation, as + the discrete needed number of parallel lines is not considered. + If False, only the relative overloading is used. + + Default: True. 
Returns -------- @@ -639,14 +925,30 @@ def get_most_critical_time_steps( """ # Run power flow - if use_troubleshooting_mode: - edisgo_obj = _troubleshooting_mode(edisgo_obj) - else: - logger.debug("Running initial power flow for temporal complexity reduction.") - edisgo_obj.analyze() + if run_initial_analyze: + if use_troubleshooting_mode: + edisgo_obj = _troubleshooting_mode( + edisgo_obj, + mode=mode, + timesteps=timesteps, + lv_grid_id=lv_grid_id, + scale_timeseries=scale_timeseries, + ) + else: + logger.debug( + "Running initial power flow for temporal complexity reduction." + ) + edisgo_obj.analyze( + mode=mode, + timesteps=timesteps, + lv_grid_id=lv_grid_id, + scale_timeseries=scale_timeseries, + ) # Select most critical steps based on current violations - loading_scores = _scored_most_critical_loading(edisgo_obj) + loading_scores = _scored_most_critical_loading( + edisgo_obj, weight_by_costs=weight_by_costs + ) if num_steps_loading is None: num_steps_loading = int(len(loading_scores) * percentage) else: @@ -658,10 +960,18 @@ def get_most_critical_time_steps( f"{len(loading_scores)} time steps are exported." ) num_steps_loading = len(loading_scores) + elif num_steps_loading < len(loading_scores): + logger.info( + f"{num_steps_loading} of a total of {len(loading_scores)} relevant " + f"time steps for overloading issues are chosen for the selection " + f"of most critical time steps." + ) steps = loading_scores[:num_steps_loading].index # Select most critical steps based on voltage violations - voltage_scores = _scored_most_critical_voltage_issues(edisgo_obj) + voltage_scores = _scored_most_critical_voltage_issues( + edisgo_obj, weight_by_costs=weight_by_costs + ) if num_steps_voltage is None: num_steps_voltage = int(len(voltage_scores) * percentage) else: @@ -673,6 +983,12 @@ def get_most_critical_time_steps( f"{len(voltage_scores)} time steps are exported." 
) num_steps_voltage = len(voltage_scores) + elif num_steps_voltage < len(voltage_scores): + logger.info( + f"{num_steps_voltage} of a total of {len(voltage_scores)} relevant " + f"time steps for voltage issues are chosen for the selection " + f"of most critical time steps." + ) steps = steps.append(voltage_scores[:num_steps_voltage].index) if len(steps) == 0: diff --git a/edisgo/tools/tools.py b/edisgo/tools/tools.py index 48da0455..ba523a00 100644 --- a/edisgo/tools/tools.py +++ b/edisgo/tools/tools.py @@ -14,7 +14,7 @@ from sqlalchemy.engine.base import Engine -from edisgo.flex_opt import exceptions +from edisgo.flex_opt import exceptions, q_control from edisgo.io.db import session_scope_egon_data, sql_grid_geom, sql_intersects from edisgo.tools import session_scope @@ -193,13 +193,157 @@ def drop_duplicated_columns(df, keep="last"): return df.loc[:, ~df.columns.duplicated(keep=keep)] -def select_cable(edisgo_obj, level, apparent_power): +def calculate_voltage_diff_pu_per_line( + s_max: float | np.ndarray, + r_total: float | np.ndarray, + x_total: float | np.ndarray, + v_nom: float | np.ndarray, + q_sign: int, + power_factor: float, +) -> float | np.ndarray: """ - Selects suitable cable type and quantity using given apparent power. + Calculate the voltage difference across a line in p.u.. - Cable is selected to be able to carry the given `apparent_power`, no load - factor is considered. Overhead lines are not considered in choosing a - suitable cable. + Parameters + ---------- + s_max : float or array-like + Apparent power the cable must carry in MVA. + r_total : float or array-like + Total resistance of the line in Ohms. + x_total : float or array-like + Total reactance of the line in Ohms. + v_nom : float or array-like + Nominal voltage of the line in kV. + q_sign : int + `q_sign` defines whether the reactive power is positive or + negative and must either be -1 or +1. In case of generators and storage units, + inductive reactive power is negative. 
In case of loads, inductive reactive + power is positive. + power_factor : :pandas:`pandas.Series` or float + Ratio of real to apparent power. + + Returns + ------- + float or array-like + Voltage difference in p.u.. If positive, the voltage difference behaves like + expected, it rises for generators and drops for loads. If negative, + the voltage difference behaves counterintuitively, it drops for generators + and rises for loads. + + """ + sin_phi = np.sqrt(1 - power_factor**2) + # Calculate the voltage difference using the formula from VDE-AR-N 4105 + voltage_diff = (s_max / (v_nom**2)) * ( + r_total * power_factor + q_sign * x_total * sin_phi + ) + return voltage_diff # in pu + + +def calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj: EDisGo, + cable_names: str | np.ndarray, + length: float, + num_parallel: int, + v_nom: float | np.ndarray, + s_max: float | np.ndarray, + component_type: str, +) -> float | np.ndarray: + """ + Calculate the voltage difference across a line in p.u. depending on line type + and component type. + + This function serves as a helper function for function + :py:func:`calculate_voltage_diff_pu_per_line`, as it automatically obtains the + equipment data per line type from the provided equipment data and default reactive + power data per component type from the configuration files. + + Parameters + ---------- + edisgo_obj : :class:`~.EDisGo` + cable_names : str or array-like + Resistance per kilometer of the cable in ohm/km. + length : float + Length of the cable in km. + num_parallel : int + Number of parallel cables. + v_nom : int + Nominal voltage of the cable(s) in kV. + s_max : float + Apparent power the cable must carry in MVA. + component_type : str, optional + Type of the component to be connected, used to obtain the default reactive power + mode and power factor from the configuration file. If this is given, + `reactive_power_mode` and `power_factor` are not considered. 
+ Possible options are "generator", "conventional_load", "charging_point", + "heat_pump" and "storage_unit". + + Returns + ------- + float or array-like + Voltage difference in p.u.. If positive, the voltage difference behaves like + expected, it rises for generators and drops for loads. If negative, + the voltage difference behaves counterintuitively, it drops for generators + and rises for loads. + + """ + # calculate total resistance and reactance for the given length and + # number of parallel cables for given cable types + config_type = "mv_cables" if v_nom > 1.0 else "lv_cables" + cable_data = edisgo_obj.topology.equipment_data[config_type] + r_total = calculate_line_resistance( + cable_data.loc[cable_names, "R_per_km"], length, num_parallel + ) + x_total = calculate_line_reactance( + cable_data.loc[cable_names, "L_per_km"], length, num_parallel + ) + + # get sign of reactive power based on component type + config_type = f"mv_{component_type}" if v_nom > 1.0 else f"lv_{component_type}" + if component_type in ["generator", "storage_unit"]: + q_sign = q_control.get_q_sign_generator( + edisgo_obj.config["reactive_power_mode"][config_type] + ) + elif component_type in ["conventional_load", "heat_pump", "charging_point"]: + q_sign = q_control.get_q_sign_load( + edisgo_obj.config["reactive_power_mode"][config_type] + ) + else: + raise ValueError( + "Specified component type is not valid. " + "Must either be 'generator', 'conventional_load', 'charging_point', " + "'heat_pump' or 'storage_unit'." 
+ ) + + # get power factor based on component type + power_factor = edisgo_obj.config["reactive_power_factor"][config_type] + + # Calculate the voltage drop or increase + return calculate_voltage_diff_pu_per_line( + s_max, + r_total, + x_total, + v_nom, + q_sign, + power_factor, + ) + + +def select_cable( + edisgo_obj: EDisGo, + level: str, + apparent_power: float, + component_type: str | None = None, + length: float = 0.0, + max_voltage_diff: float | None = None, + max_cables: int = 7, +) -> tuple[pd.Series, int]: + """ + Selects suitable cable type and quantity based on apparent power and + voltage deviation. + + The cable is selected to carry the given `apparent_power` and to ensure + acceptable voltage deviation over the cable. + Overhead lines are not considered in choosing a suitable cable. Parameters ---------- @@ -209,49 +353,96 @@ def select_cable(edisgo_obj, level, apparent_power): 'lv'. apparent_power : float Apparent power the cable must carry in MVA. + component_type : str + Type of the component to be connected. Possible options are "generator", + "conventional_load", "charging_point", "heat_pump" or "storage_unit". + Only needed in case a cable length is given and thus the voltage difference over + the cable can be taken into account for selecting a suitable cable. In that case + it is used to obtain the default power factor and reactive power mode from the + configuration files in sections `reactive_power_factor` and + `reactive_power_mode`. + Default: None. + length : float + Length of the cable in km. Default: 0. + max_voltage_diff : float + Maximum allowed voltage difference in p.u.. + If None, it defaults to the value specified in the configuration file + under the `grid_connection` section for the respective voltage level + (lv_max_voltage_deviation for LV and mv_max_voltage_deviation for MV). + Default: None. + max_cables : int + Maximum number of cables to consider. Default: 7. 
Returns ------- - :pandas:`pandas.Series` - Series with attributes of selected cable as in equipment data and - cable type as series name. - int - Number of necessary parallel cables. + tuple[:pandas:`pandas.Series`, int] + A tuple containing information on the selected cable type and the quantity + needed. """ - - cable_count = 1 - if level == "mv": cable_data = edisgo_obj.topology.equipment_data["mv_cables"] available_cables = cable_data[ cable_data["U_n"] == edisgo_obj.topology.mv_grid.nominal_voltage ] + if not max_voltage_diff: + max_voltage_diff = edisgo_obj.config["grid_connection"][ + "mv_max_voltage_deviation" + ] elif level == "lv": available_cables = edisgo_obj.topology.equipment_data["lv_cables"] + if not max_voltage_diff: + max_voltage_diff = edisgo_obj.config["grid_connection"][ + "lv_max_voltage_deviation" + ] else: raise ValueError( "Specified voltage level is not valid. Must either be 'mv' or 'lv'." ) + cable_count = 1 suitable_cables = available_cables[ calculate_apparent_power( available_cables["U_n"], available_cables["I_max_th"], cable_count ) > apparent_power ] + if length != 0: + suitable_cables = suitable_cables[ + calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj=edisgo_obj, + cable_names=suitable_cables.index, + length=length, + num_parallel=cable_count, + v_nom=available_cables["U_n"].values[0], + s_max=apparent_power, + component_type=component_type, + ) + < max_voltage_diff + ] # increase cable count until appropriate cable type is found - while suitable_cables.empty and cable_count < 7: + while suitable_cables.empty and cable_count < max_cables: # parameter cable_count += 1 suitable_cables = available_cables[ calculate_apparent_power( - available_cables["U_n"], - available_cables["I_max_th"], - cable_count, + available_cables["U_n"], available_cables["I_max_th"], cable_count ) > apparent_power ] + if length != 0: + suitable_cables = suitable_cables[ + calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj=edisgo_obj, + 
cable_names=available_cables.index, + length=length, + num_parallel=cable_count, + v_nom=available_cables["U_n"].values[0], + s_max=apparent_power, + component_type=component_type, + ) + < max_voltage_diff + ] if suitable_cables.empty: raise exceptions.MaximumIterationError( "Could not find a suitable cable for apparent power of " @@ -1119,7 +1310,7 @@ def reduce_memory_usage(df: pd.DataFrame, show_reduction: bool = False) -> pd.Da be reduced to a smaller data type. Source: - https://www.mikulskibartosz.name/how-to-reduce-memory-usage-in-pandas/ + https://mikulskibartosz.name/how-to-reduce-memory-usage-in-pandas Parameters ---------- diff --git a/examples/edisgo_simple_example.ipynb b/examples/edisgo_simple_example.ipynb index b0a68fc6..c7ee79ce 100644 --- a/examples/edisgo_simple_example.ipynb +++ b/examples/edisgo_simple_example.ipynb @@ -112,7 +112,7 @@ "Currently, synthetic grid data generated with the python project\n", "[ding0](https://github.com/openego/ding0)\n", "is the only supported data source for distribution grid data. ding0 provides the grid topology data in the form of csv files, with separate files for buses, lines, loads, generators, etc. You can retrieve ding0 data from\n", - "[Zenodo](https://zenodo.org/record/890479)\n", + "[Zenodo](https://zenodo.org/records/890479)\n", "(make sure you choose latest data) or check out the\n", "[Ding0 documentation](https://dingo.readthedocs.io/en/dev/usage_details.html#ding0-examples)\n", "on how to generate grids yourself. A ding0 example grid can be viewed [here](https://github.com/openego/eDisGo/tree/dev/tests/data/ding0_test_network_2). It is possible to provide your own grid data if it is in the same format as the ding0 grid data. 
\n", diff --git a/examples/electromobility_example.ipynb b/examples/electromobility_example.ipynb index 2581f199..d0c5cc25 100644 --- a/examples/electromobility_example.ipynb +++ b/examples/electromobility_example.ipynb @@ -430,8 +430,7 @@ " url = (f\"https://github.com/openego/eDisGo/tree/dev/\" +\n", " f\"tests/data/simbev_example_scenario/{ags}/\")\n", " page = requests.get(url).text\n", - " items = json.loads(page)[\"payload\"][\"tree\"][\"items\"]\n", - " filenames = [f[\"name\"] for f in items if \"csv\" in f[\"name\"]]\n", + " filenames = [_ for _ in page.split('\"') if \".csv\" in _ and \"/\" not in _]\n", "\n", " for file in filenames:\n", " req = requests.get(f\"{raw_url}/{ags}/{file}\")\n", @@ -470,8 +469,7 @@ " url = (\"https://github.com/openego/eDisGo/tree/dev/\" +\n", " \"tests/data/tracbev_example_scenario/\")\n", " page = requests.get(url).text\n", - " items = json.loads(page)[\"payload\"][\"tree\"][\"items\"]\n", - " filenames = [f[\"name\"] for f in items if \"gpkg\" in f[\"name\"]]\n", + " filenames = [_ for _ in page.split('\"') if \".gpkg\" in _ and \"/\" not in _]\n", "\n", " for file in filenames:\n", " req = requests.get(\n", diff --git a/rtd_requirements.txt b/rtd_requirements.txt index dd3c393d..3344a499 100644 --- a/rtd_requirements.txt +++ b/rtd_requirements.txt @@ -8,13 +8,12 @@ multiprocess networkx >= 2.5.0 pandas >= 1.4.0 plotly -pyomo >= 6.0 pypower pyproj >= 3.0.0 -pypsa >=0.17.0, <=0.20.1 +pypsa == 0.26.2 pyyaml saio -scikit-learn +scikit-learn < 1.3.0 sphinx sphinx_rtd_theme >=0.5.2 sphinx-autodoc-typehints diff --git a/setup.py b/setup.py index e28c1469..082fbe8b 100644 --- a/setup.py +++ b/setup.py @@ -4,9 +4,9 @@ from setuptools import find_packages, setup -if sys.version_info[:2] < (3, 8): +if sys.version_info[:2] < (3, 9): error = ( - "eDisGo requires Python 3.8 or later (%d.%d detected)." % sys.version_info[:2] + "eDisGo requires Python 3.9 or later (%d.%d detected)." 
% sys.version_info[:2] ) sys.stderr.write(error + "\n") sys.exit(1) @@ -44,17 +44,18 @@ def read(fname): "matplotlib >= 3.3.0", "multiprocess", "networkx >= 2.5.0", - "pandas >= 1.4.0", + # newer pandas versions don't work with specified sqlalchemy versions, but upgrading + # sqlalchemy leads to new errors.. should be fixed at some point + "pandas >= 1.4.0, < 2.2.0", "plotly", "pydot", "pygeos", - "pyomo <= 6.4.2", # Problem with PyPSA 20.1 fixed in newest PyPSA release "pypower", "pyproj >= 3.0.0", - "pypsa >= 0.17.0, <= 0.20.1", + "pypsa == 0.26.2", "pyyaml", "saio", - "scikit-learn <= 1.1.1", + "scikit-learn < 1.3.0", "shapely >= 1.7.0", "sqlalchemy < 1.4.0", "sshtunnel", diff --git a/tests/conftest.py b/tests/conftest.py index 6809eb81..998adaeb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -57,6 +57,9 @@ def pytest_addoption(parser): default=False, help="run tests that only work locally", ) + parser.addoption( + "--runoedbtest", action="store_true", default=False, help="Run OEDB tests" + ) def pytest_collection_modifyitems(config, items): @@ -75,3 +78,8 @@ def pytest_collection_modifyitems(config, items): for item in items: if "runonlinux" in item.keywords: item.add_marker(skip_windows) + if not config.getoption("--runoedbtest"): + skip_oedbtest = pytest.mark.skip(reason="need --runoedbtest option to run") + for item in items: + if "oedbtest" in item.keywords: + item.add_marker(skip_oedbtest) diff --git a/tests/flex_opt/test_costs.py b/tests/flex_opt/test_costs.py index 813714aa..308f9e7e 100644 --- a/tests/flex_opt/test_costs.py +++ b/tests/flex_opt/test_costs.py @@ -3,7 +3,7 @@ import pytest from edisgo import EDisGo -from edisgo.flex_opt.costs import grid_expansion_costs, line_expansion_costs +from edisgo.flex_opt import costs as costs_mod class TestCosts: @@ -76,12 +76,12 @@ def test_costs(self): ], ) - costs = grid_expansion_costs(self.edisgo) + costs = costs_mod.grid_expansion_costs(self.edisgo) assert len(costs) == 4 assert ( 
costs.loc["MVStation_1_transformer_reinforced_2", "voltage_level"] - == "mv/lv" + == "hv/mv" ) assert costs.loc["MVStation_1_transformer_reinforced_2", "quantity"] == 1 assert costs.loc["MVStation_1_transformer_reinforced_2", "total_costs"] == 1000 @@ -97,13 +97,13 @@ def test_costs(self): assert costs.loc["Line_10019", "type"] == "48-AL1/8-ST1A" assert costs.loc["Line_10019", "voltage_level"] == "mv" assert np.isclose(costs.loc["Line_50000002", "total_costs"], 2.34) - assert np.isclose(costs.loc["Line_50000002", "length"], 0.09) - assert costs.loc["Line_50000002", "quantity"] == 3 + assert np.isclose(costs.loc["Line_50000002", "length"], 0.03) + assert costs.loc["Line_50000002", "quantity"] == 1 assert costs.loc["Line_50000002", "type"] == "NAYY 4x1x35" assert costs.loc["Line_50000002", "voltage_level"] == "lv" def test_line_expansion_costs(self): - costs = line_expansion_costs(self.edisgo) + costs = costs_mod.line_expansion_costs(self.edisgo) assert len(costs) == len(self.edisgo.topology.lines_df) assert (costs.index == self.edisgo.topology.lines_df.index).all() assert len(costs[costs.voltage_level == "mv"]) == len( @@ -116,7 +116,9 @@ def test_line_expansion_costs(self): assert np.isclose(costs.at["Line_10000015", "costs_cable"], 0.27) assert costs.at["Line_10000015", "voltage_level"] == "lv" - costs = line_expansion_costs(self.edisgo, ["Line_10003", "Line_10000015"]) + costs = costs_mod.line_expansion_costs( + self.edisgo, ["Line_10003", "Line_10000015"] + ) assert len(costs) == 2 assert (costs.index.values == ["Line_10003", "Line_10000015"]).all() assert np.isclose(costs.at["Line_10003", "costs_earthworks"], 0.083904 * 60) @@ -125,3 +127,28 @@ def test_line_expansion_costs(self): assert np.isclose(costs.at["Line_10000015", "costs_earthworks"], 1.53) assert np.isclose(costs.at["Line_10000015", "costs_cable"], 0.27) assert costs.at["Line_10000015", "voltage_level"] == "lv" + + def test_transformer_expansion_costs(self): + costs = 
costs_mod.transformer_expansion_costs(self.edisgo) + transformers_df = pd.concat( + [ + self.edisgo.topology.transformers_df, + self.edisgo.topology.transformers_hvmv_df, + ] + ) + assert len(costs) == len(transformers_df) + assert sorted(costs.index) == sorted(transformers_df.index) + assert len(costs[costs.voltage_level == "hv/mv"]) == len( + self.edisgo.topology.transformers_hvmv_df + ) + assert np.isclose(costs.at["MVStation_1_transformer_1", "costs"], 1000) + assert costs.at["MVStation_1_transformer_1", "voltage_level"] == "hv/mv" + assert np.isclose(costs.at["LVStation_4_transformer_2", "costs"], 10) + assert costs.at["LVStation_4_transformer_2", "voltage_level"] == "mv/lv" + + costs = costs_mod.transformer_expansion_costs( + self.edisgo, ["LVStation_5_transformer_1"] + ) + assert len(costs) == 1 + assert np.isclose(costs.at["LVStation_5_transformer_1", "costs"], 10) + assert costs.at["LVStation_5_transformer_1", "voltage_level"] == "mv/lv" diff --git a/tests/flex_opt/test_q_control.py b/tests/flex_opt/test_q_control.py index 9595ec1c..a028c154 100644 --- a/tests/flex_opt/test_q_control.py +++ b/tests/flex_opt/test_q_control.py @@ -101,7 +101,7 @@ def test__fixed_cosphi_default_power_factor( # test for component_type="generators" pf = q_control._fixed_cosphi_default_power_factor( - comp_df=df, component_type="generators", configs=config + comp_df=df, component_type="generator", configs=config ) assert pf.shape == (3,) @@ -112,7 +112,7 @@ def test__fixed_cosphi_default_power_factor( # test for component_type="loads" pf = q_control._fixed_cosphi_default_power_factor( - comp_df=df, component_type="conventional_loads", configs=config + comp_df=df, component_type="conventional_load", configs=config ) assert pf.shape == (3,) @@ -123,7 +123,7 @@ def test__fixed_cosphi_default_power_factor( # test for component_type="charging_points" pf = q_control._fixed_cosphi_default_power_factor( - comp_df=df, component_type="charging_points", configs=config + comp_df=df, 
component_type="charging_point", configs=config ) assert pf.shape == (3,) @@ -134,7 +134,7 @@ def test__fixed_cosphi_default_power_factor( # test for component_type="heat_pumps" pf = q_control._fixed_cosphi_default_power_factor( - comp_df=df, component_type="heat_pumps", configs=config + comp_df=df, component_type="heat_pump", configs=config ) assert pf.shape == (3,) @@ -145,7 +145,7 @@ def test__fixed_cosphi_default_power_factor( # test for component_type="storage_units" pf = q_control._fixed_cosphi_default_power_factor( - comp_df=df, component_type="storage_units", configs=config + comp_df=df, component_type="storage_unit", configs=config ) assert pf.shape == (3,) @@ -165,7 +165,7 @@ def test__fixed_cosphi_default_reactive_power_sign( # test for component_type="generators" pf = q_control._fixed_cosphi_default_reactive_power_sign( - comp_df=df, component_type="generators", configs=config + comp_df=df, component_type="generator", configs=config ) assert pf.shape == (3,) @@ -176,7 +176,7 @@ def test__fixed_cosphi_default_reactive_power_sign( # test for component_type="conventional_loads" pf = q_control._fixed_cosphi_default_reactive_power_sign( - comp_df=df, component_type="conventional_loads", configs=config + comp_df=df, component_type="conventional_load", configs=config ) assert pf.shape == (3,) @@ -187,7 +187,7 @@ def test__fixed_cosphi_default_reactive_power_sign( # test for component_type="charging_points" pf = q_control._fixed_cosphi_default_reactive_power_sign( - comp_df=df, component_type="charging_points", configs=config + comp_df=df, component_type="charging_point", configs=config ) assert pf.shape == (3,) @@ -198,7 +198,7 @@ def test__fixed_cosphi_default_reactive_power_sign( # test for component_type="heat_pumps" pf = q_control._fixed_cosphi_default_reactive_power_sign( - comp_df=df, component_type="heat_pumps", configs=config + comp_df=df, component_type="heat_pump", configs=config ) assert pf.shape == (3,) @@ -209,7 +209,7 @@ def 
test__fixed_cosphi_default_reactive_power_sign( # test for component_type="storage_units" pf = q_control._fixed_cosphi_default_reactive_power_sign( - comp_df=df, component_type="storage_units", configs=config + comp_df=df, component_type="storage_unit", configs=config ) assert pf.shape == (3,) diff --git a/tests/flex_opt/test_reinforce_grid.py b/tests/flex_opt/test_reinforce_grid.py index dd4ca4cb..e073b232 100644 --- a/tests/flex_opt/test_reinforce_grid.py +++ b/tests/flex_opt/test_reinforce_grid.py @@ -58,12 +58,10 @@ def test_reinforce_grid(self): # test reduced analysis res_reduced = reinforce_grid( edisgo=copy.deepcopy(self.edisgo), - timesteps_pfa="reduced_analysis", - num_steps_loading=4, - ) - assert_frame_equal( - res_reduced.equipment_changes, results_dict[None].equipment_changes + reduced_analysis=True, + num_steps_loading=2, ) + assert len(res_reduced.i_res) == 2 def test_run_separate_lv_grids(self): edisgo = copy.deepcopy(self.edisgo) @@ -83,7 +81,7 @@ def test_run_separate_lv_grids(self): assert len(g.buses_df) > 1 assert len(lv_grids_new) == 26 - assert np.isclose(edisgo.results.grid_expansion_costs.total_costs.sum(), 280.06) + assert np.isclose(edisgo.results.grid_expansion_costs.total_costs.sum(), 440.06) # check if all generators are still present assert np.isclose( diff --git a/tests/io/test_generators_import.py b/tests/io/test_generators_import.py index 9adfdde1..51ee4b5e 100644 --- a/tests/io/test_generators_import.py +++ b/tests/io/test_generators_import.py @@ -484,6 +484,7 @@ class TestGeneratorsImportOEDB: """ @pytest.mark.slow + @pytest.mark.oedbtest def test_oedb_legacy_without_timeseries(self): edisgo = EDisGo( ding0_grid=pytest.ding0_test_network_2_path, @@ -497,6 +498,7 @@ def test_oedb_legacy_without_timeseries(self): assert np.isclose(edisgo.topology.generators_df.p_nom.sum(), 20.18783) @pytest.mark.slow + @pytest.mark.oedbtest def test_oedb_legacy_with_worst_case_timeseries(self): edisgo = 
EDisGo(ding0_grid=pytest.ding0_test_network_2_path) edisgo.set_time_series_worst_case_analysis() @@ -568,6 +570,7 @@ def test_oedb_legacy_with_worst_case_timeseries(self): # :, new_solar_gen.name] / new_solar_gen.p_nom).all() @pytest.mark.slow + @pytest.mark.oedbtest def test_oedb_legacy_with_timeseries_by_technology(self): timeindex = pd.date_range("1/1/2012", periods=3, freq="H") ts_gen_dispatchable = pd.DataFrame( @@ -647,6 +650,7 @@ def test_oedb_legacy_with_timeseries_by_technology(self): # :, new_solar_gen.name] / new_solar_gen.p_nom).all() @pytest.mark.slow + @pytest.mark.oedbtest def test_target_capacity(self): edisgo = EDisGo( ding0_grid=pytest.ding0_test_network_2_path, diff --git a/tests/io/test_powermodels_io.py b/tests/io/test_powermodels_io.py index 4d0d7842..b3bfab03 100644 --- a/tests/io/test_powermodels_io.py +++ b/tests/io/test_powermodels_io.py @@ -310,7 +310,7 @@ def test__get_pf(self): # test mode None powermodels_network, hv_flex_dict = powermodels_io.to_powermodels(self.edisgo) - for component in ["gen", "storage"]: + for component in ["generator", "storage_unit"]: pf, sign = powermodels_io._get_pf( self.edisgo, powermodels_network, 1, component ) @@ -322,10 +322,10 @@ def test__get_pf(self): assert pf == 0.95 assert sign == -1 - for component in ["hp", "cp"]: + for component in ["heat_pump", "charging_point"]: for bus in [1, 29]: pf, sign = powermodels_io._get_pf( - self.edisgo, powermodels_network, 1, component + self.edisgo, powermodels_network, bus, component ) assert pf == 1 assert sign == 1 diff --git a/tests/io/test_timeseries_import.py b/tests/io/test_timeseries_import.py index 6c88f54b..c6abb872 100644 --- a/tests/io/test_timeseries_import.py +++ b/tests/io/test_timeseries_import.py @@ -59,6 +59,7 @@ def test__timeindex_helper_func(self): assert_index_equal(ind, given_index) assert_index_equal(ind_full, timeindex) + @pytest.mark.oedbtest def test_feedin_oedb_legacy(self): edisgo = EDisGo(ding0_grid=pytest.ding0_test_network_path) 
timeindex = pd.date_range("1/1/2010", periods=3000, freq="H") @@ -88,16 +89,20 @@ def test_feedin_oedb(self): def test_load_time_series_demandlib(self): edisgo = EDisGo(ding0_grid=pytest.ding0_test_network_path) - timeindex = pd.date_range("1/1/2018", periods=7000, freq="H") + timeindex = pd.date_range("1/1/2018", periods=8760, freq="H") load = timeseries_import.load_time_series_demandlib(edisgo, timeindex) assert ( load.columns == ["cts", "residential", "agricultural", "industrial"] ).all() - assert len(load) == 7000 + assert len(load) == 8760 assert np.isclose(load.loc[timeindex[453], "cts"], 8.33507e-05) assert np.isclose(load.loc[timeindex[13], "residential"], 1.73151e-04) assert np.isclose(load.loc[timeindex[6328], "agricultural"], 1.01346e-04) - assert np.isclose(load.loc[timeindex[4325], "industrial"], 9.91768e-05) + assert np.isclose(load.loc[timeindex[4325], "industrial"], 9.87654320e-05) + assert np.isclose(load.sum()["cts"], 1.0) + assert np.isclose(load.sum()["residential"], 1.0) + assert np.isclose(load.sum()["agricultural"], 1.0) + assert np.isclose(load.sum()["industrial"], 1.0) @pytest.mark.local def test_cop_oedb(self): @@ -268,6 +273,13 @@ def test_get_residential_electricity_profiles_per_building(self): assert df.shape == (8760, 1) assert np.isclose(df.loc[:, 442081].sum(), 3.20688, atol=1e-3) + # test with status quo + df = timeseries_import.get_residential_electricity_profiles_per_building( + [-1, 442081], "eGon2021", pytest.engine + ) + assert df.shape == (8760, 1) + assert np.isclose(df.loc[:, 442081].sum(), 4.288845, atol=1e-3) + @pytest.mark.local def test_get_industrial_electricity_profiles_per_site(self): # test with one site and one OSM area @@ -283,3 +295,11 @@ def test_get_industrial_electricity_profiles_per_site(self): [541658], "eGon2035", pytest.engine ) assert df.shape == (8760, 1) + + # test with status quo + df = timeseries_import.get_industrial_electricity_profiles_per_site( + [1, 541658], "eGon2021", pytest.engine + ) + assert 
df.shape == (8760, 2) + assert np.isclose(df.loc[:, 1].sum(), 31655.640, atol=1e-3) + assert np.isclose(df.loc[:, 541658].sum(), 2910.816, atol=1e-3) diff --git a/tests/network/test_timeseries.py b/tests/network/test_timeseries.py index 5fc15ea4..b8d7c4c2 100644 --- a/tests/network/test_timeseries.py +++ b/tests/network/test_timeseries.py @@ -1235,6 +1235,7 @@ def test_worst_case_storage_units(self): ) @pytest.mark.slow + @pytest.mark.oedbtest def test_predefined_fluctuating_generators_by_technology(self): timeindex = pd.date_range("1/1/2011 12:00", periods=2, freq="H") self.edisgo.timeseries.timeindex = timeindex @@ -1564,9 +1565,9 @@ def test_predefined_conventional_loads_by_sector(self, caplog): index=index, columns=["cts", "residential", "agricultural", "industrial"], data=[ - [0.0000597, 0.0000782, 0.0000654, 0.0000992], - [0.0000526, 0.0000563, 0.0000611, 0.0000992], - [0.0000459, 0.0000451, 0.0000585, 0.0000992], + [0.000059711, 0.0000782190, 0.00006540, 0.00009876], + [0.000052590, 0.0000563428, 0.00006110, 0.00009876], + [0.000045927, 0.0000451043, 0.00005843, 0.00009876], ], ) @@ -1655,7 +1656,7 @@ def test_predefined_conventional_loads_by_sector(self, caplog): self.edisgo.timeseries.loads_active_power[ "Load_industrial_LVGrid_6_1" ].values, - [0.05752256] * 3, + [0.05728395] * 3, ).all() assert np.isclose( self.edisgo.timeseries.loads_active_power.loc[ @@ -1793,6 +1794,26 @@ def test_predefined_conventional_loads_by_sector(self, caplog): ).values, ).all() + # test Error if 'annual_consumption' is missing + # Save the original 'annual_consumption' values + original_annual_consumption = self.edisgo.topology.loads_df[ + "annual_consumption" + ].copy() + # Set 'annual_consumption' to None for the test + self.edisgo.topology.loads_df["annual_consumption"] = None + with pytest.raises(AttributeError) as exc_info: + self.edisgo.timeseries.predefined_conventional_loads_by_sector( + self.edisgo, "demandlib" + ) + assert ( + exc_info.value.args[0] + == "The annual 
consumption of some loads is missing. Please provide" + ) + # Restore the original 'annual_consumption' values + self.edisgo.topology.loads_df[ + "annual_consumption" + ] = original_annual_consumption + def test_predefined_charging_points_by_use_case(self, caplog): index = pd.date_range("1/1/2018", periods=3, freq="H") self.edisgo.timeseries.timeindex = index diff --git a/tests/network/test_topology.py b/tests/network/test_topology.py index 0baf02f3..1a902360 100644 --- a/tests/network/test_topology.py +++ b/tests/network/test_topology.py @@ -951,9 +951,17 @@ def setup_class(self): self.edisgo.set_time_series_worst_case_analysis() def test_to_geopandas(self): - geopandas_container = self.edisgo.topology.to_geopandas() + # further tests of to_geopandas are conducted in test_geopandas_helper.py - assert isinstance(geopandas_container, GeoPandasGridContainer) + # set up edisgo object with georeferenced LV + edisgo_geo = EDisGo( + ding0_grid=pytest.ding0_test_network_3_path, legacy_ding0_grids=False + ) + test_suits = { + "mv": {"edisgo_obj": self.edisgo, "mode": "mv", "lv_grid_id": None}, + "lv": {"edisgo_obj": edisgo_geo, "mode": "lv", "lv_grid_id": 1164120002}, + "mv+lv": {"edisgo_obj": edisgo_geo, "mode": None, "lv_grid_id": None}, + } attrs = [ "buses_gdf", @@ -964,19 +972,30 @@ def test_to_geopandas(self): "transformers_gdf", ] - for attr_str in attrs: - attr = getattr(geopandas_container, attr_str) - grid_attr = getattr( - self.edisgo.topology.mv_grid, attr_str.replace("_gdf", "_df") + for test_suit, params in test_suits.items(): + # call to_geopandas() function with different settings + geopandas_container = params["edisgo_obj"].topology.to_geopandas( + mode=params["mode"], lv_grid_id=params["lv_grid_id"] ) - assert isinstance(attr, GeoDataFrame) + assert isinstance(geopandas_container, GeoPandasGridContainer) - common_cols = list(set(attr.columns).intersection(grid_attr.columns)) + # check that content of geodataframes is the same as content of original + # 
dataframes + for attr_str in attrs: + grid = getattr(geopandas_container, "grid") + attr = getattr(geopandas_container, attr_str) + grid_attr = getattr(grid, attr_str.replace("_gdf", "_df")) - assert_frame_equal( - attr[common_cols], grid_attr[common_cols], check_names=False - ) + assert isinstance(attr, GeoDataFrame) + + common_cols = list(set(attr.columns).intersection(grid_attr.columns)) + + assert_frame_equal( + attr[common_cols].sort_index(), + grid_attr[common_cols].sort_index(), + check_names=False, + ) def test_from_csv(self): """ @@ -1720,7 +1739,7 @@ def test_connect_to_lv(self): loads_before = self.edisgo.topology.loads_df test_hp = { - "p_set": 0.3, + "p_set": 0.1, "geom": geom, "voltage_level": 6, "mvlv_subst_id": 6, @@ -1751,7 +1770,7 @@ def test_connect_to_lv(self): new_line_df.loc[new_line_df.index[0], ["bus0", "bus1"]] ) # check new heat pump - assert self.edisgo.topology.loads_df.at[comp_name, "p_set"] == 0.3 + assert self.edisgo.topology.loads_df.at[comp_name, "p_set"] == 0.1 # ############# storage unit ################# # test existing substation ID (voltage level 7) @@ -1909,3 +1928,25 @@ def test_check_integrity(self, caplog): assert "There are lines with very short line lengths" in caplog.text assert "Very small values for impedance of lines" and line in caplog.text caplog.clear() + + def test_find_meshes(self, caplog: pytest.LogCaptureFixture): + meshes = Topology.find_meshes(self.edisgo) + assert not meshes + self.edisgo.topology.add_line( + "Bus_GeneratorFluctuating_2", + "Bus_GeneratorFluctuating_6", + 0.1, + x=0.1, + r=0.1, + ) + meshes = Topology.find_meshes(self.edisgo) + assert len(meshes) == 1 + assert "Bus_GeneratorFluctuating_2" in meshes[0] + assert "Bus_GeneratorFluctuating_6" in meshes[0] + self.edisgo.topology.add_line( + "Bus_BranchTee_LVGrid_2_3", "Bus_BranchTee_LVGrid_3_3", 0.1, x=0.1, r=0.1 + ) + meshes = Topology.find_meshes(self.edisgo) + assert len(meshes) == 2 + assert "Bus_BranchTee_LVGrid_2_3" in meshes[1] + assert 
"Grid contains mesh(es)." in caplog.text diff --git a/tests/test_edisgo.py b/tests/test_edisgo.py index e2642bf9..dbfa03a3 100755 --- a/tests/test_edisgo.py +++ b/tests/test_edisgo.py @@ -382,6 +382,7 @@ def test_to_graph(self): ) @pytest.mark.slow + @pytest.mark.oedbtest def test_generator_import(self): edisgo = EDisGo(ding0_grid=pytest.ding0_test_network_2_path) edisgo.import_generators("nep2035") @@ -430,6 +431,7 @@ def test_analyze(self, caplog): assert "Current fraction in iterative process: 1.0." in caplog.text def test_reinforce(self): + # ToDo add tests to check content of equipment_changes # ###################### test with default settings ########################## self.setup_worst_case_time_series() results = self.edisgo.reinforce() @@ -520,7 +522,7 @@ def test_reinforce_catch_convergence(self): ) results = self.edisgo.reinforce(catch_convergence_problems=True) assert results.unresolved_issues.empty - assert len(results.grid_expansion_costs) == 132 + assert len(results.grid_expansion_costs) == 134 assert len(results.equipment_changes) == 218 assert results.v_res.shape == (4, 142) @@ -542,10 +544,21 @@ def test_enhanced_reinforce_grid(self): results = edisgo_obj.results - assert len(results.grid_expansion_costs) == 445 + assert len(results.grid_expansion_costs) == 454 assert len(results.equipment_changes) == 892 assert results.v_res.shape == (4, 148) + edisgo_obj = copy.deepcopy(self.edisgo) + edisgo_obj = enhanced_reinforce_grid( + edisgo_obj, + reduced_analysis=True, + is_worst_case=False, + separate_lv_grids=True, + num_steps_loading=1, + num_steps_voltage=1, + ) + assert edisgo_obj.results.v_res.shape == (2, 162) + def test_add_component(self, caplog): self.setup_worst_case_time_series() index = self.edisgo.timeseries.timeindex diff --git a/tests/test_examples.py b/tests/test_examples.py index 8e31d88d..9f0a862e 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -26,6 +26,7 @@ def test_plot_example_ipynb(self): assert 
result.exec_error is None @pytest.mark.slow + @pytest.mark.oedbtest def test_electromobility_example_ipynb(self): path = os.path.join(self.examples_dir_path, "electromobility_example.ipynb") notebook = pytest_notebook.notebook.load_notebook(path=path) @@ -39,6 +40,7 @@ def test_electromobility_example_ipynb(self): assert result.exec_error is None @pytest.mark.slow + @pytest.mark.oedbtest def test_edisgo_simple_example_ipynb(self): path = os.path.join(self.examples_dir_path, "edisgo_simple_example.ipynb") notebook = pytest_notebook.notebook.load_notebook(path=path) diff --git a/tests/tools/test_geopandas_helper.py b/tests/tools/test_geopandas_helper.py new file mode 100644 index 00000000..a9aca942 --- /dev/null +++ b/tests/tools/test_geopandas_helper.py @@ -0,0 +1,65 @@ +import pytest + +from edisgo import EDisGo +from edisgo.tools import geopandas_helper + + +class TestGeopandasHelper: + @classmethod + def setup_class(self): + self.edisgo = EDisGo(ding0_grid=pytest.ding0_test_network_path) + + def test_to_geopandas(self): + # further tests of this function are conducted in test_topology.py + # test MV grid + data = geopandas_helper.to_geopandas(self.edisgo.topology.mv_grid, 4326) + assert data.buses_gdf.shape[0] == self.edisgo.topology.mv_grid.buses_df.shape[0] + assert ( + data.buses_gdf.shape[1] + == self.edisgo.topology.mv_grid.buses_df.shape[1] + 1 - 2 + ) + assert "geometry" in data.buses_gdf.columns + + assert data.lines_gdf.shape[0] == self.edisgo.topology.mv_grid.lines_df.shape[0] + assert ( + data.lines_gdf.shape[1] + == self.edisgo.topology.mv_grid.lines_df.shape[1] + 2 + ) + assert "geometry" in data.lines_gdf.columns + + assert data.loads_gdf.shape[0] == self.edisgo.topology.mv_grid.loads_df.shape[0] + assert ( + data.loads_gdf.shape[1] + == self.edisgo.topology.mv_grid.loads_df.shape[1] + 2 + ) + assert "geometry" in data.loads_gdf.columns + + assert ( + data.generators_gdf.shape[0] + == self.edisgo.topology.mv_grid.generators_df.shape[0] + ) + assert 
( + data.generators_gdf.shape[1] + == self.edisgo.topology.mv_grid.generators_df.shape[1] + 2 + ) + assert "geometry" in data.generators_gdf.columns + + assert ( + data.storage_units_gdf.shape[0] + == self.edisgo.topology.mv_grid.storage_units_df.shape[0] + ) + assert ( + data.storage_units_gdf.shape[1] + == self.edisgo.topology.mv_grid.storage_units_df.shape[1] + 2 + ) + assert "geometry" in data.storage_units_gdf.columns + + assert ( + data.transformers_gdf.shape[0] + == self.edisgo.topology.mv_grid.transformers_df.shape[0] + ) + assert ( + data.transformers_gdf.shape[1] + == self.edisgo.topology.mv_grid.transformers_df.shape[1] + 2 + ) + assert "geometry" in data.transformers_gdf.columns diff --git a/tests/tools/test_logger.py b/tests/tools/test_logger.py index 80e372ee..eee071df 100644 --- a/tests/tools/test_logger.py +++ b/tests/tools/test_logger.py @@ -1,65 +1,85 @@ import logging import os +import pytest + from edisgo.tools.logger import setup_logger +def check_file_output(filename, output): + with open(filename) as file: + last_line = file.readlines()[-1].split(" ")[3:] + last_line = " ".join(last_line) + assert last_line == output + + +def reset_loggers(filename): + logger = logging.getLogger("edisgo") + logger.handlers.clear() + logger.propagate = True + # try removing file - when run on github for Windows removing the file leads + # to a PermissionError + try: + os.remove(filename) + except PermissionError: + pass + + class TestClass: def test_setup_logger(self): - def check_file_output(output): - with open("edisgo.log") as file: - last_line = file.readlines()[-1].split(" ")[3:] - last_line = " ".join(last_line) - assert last_line == output - - def reset_loggers(): - logger = logging.getLogger("edisgo") - logger.propagate = True - logger.handlers.clear() - logger = logging.getLogger() - logger.handlers.clear() - - if os.path.exists("edisgo.log"): - os.remove("edisgo.log") + filename = os.path.join( + os.path.expanduser("~"), ".edisgo", "log", 
"test_log.log" + ) + if os.path.exists(filename): + os.remove(filename) setup_logger( loggers=[ {"name": "root", "file_level": "debug", "stream_level": "debug"}, {"name": "edisgo", "file_level": "debug", "stream_level": "debug"}, ], - file_name="edisgo.log", + file_name="test_log.log", + log_dir="default", ) logger = logging.getLogger("edisgo") # Test that edisgo logger writes to file. logger.debug("root") - check_file_output("edisgo - DEBUG: root\n") + check_file_output(filename, "edisgo - DEBUG: root\n") # Test that root logger writes to file. logging.debug("root") - check_file_output("root - DEBUG: root\n") + check_file_output(filename, "root - DEBUG: root\n") + + reset_loggers(filename) - # reset_loggers() + @pytest.mark.runonlinux + def test_setup_logger_2(self): + """ + This test is only run on linux, as the log file is written to the user + home directory, which is not allowed when tests are run on github. + + """ + + # delete any existing log files + log_files = [_ for _ in os.listdir(os.getcwd()) if ".log" in _] + for log_file in log_files: + os.remove(log_file) setup_logger( loggers=[ {"name": "edisgo", "file_level": "debug", "stream_level": "debug"}, ], - file_name="edisgo.log", reset_loggers=True, debug_message=True, ) logger = logging.getLogger("edisgo") + + filename = [_ for _ in os.listdir(os.getcwd()) if ".log" in _][0] # Test that edisgo logger writes to file. logger.debug("edisgo") - check_file_output("edisgo - DEBUG: edisgo\n") - # Test that root logger doesn't writes to file. - logging.debug("edisgo") - check_file_output("edisgo - DEBUG: edisgo\n") - - @classmethod - def teardown_class(cls): - logger = logging.getLogger("edisgo") - logger.handlers.clear() - logger.propagate = True + check_file_output(filename, "edisgo - DEBUG: edisgo\n") + # Test that root logger doesn't write to file. 
+ logging.debug("root") + check_file_output(filename, "edisgo - DEBUG: edisgo\n") - os.remove("edisgo.log") + reset_loggers(filename) diff --git a/tests/tools/test_temporal_complexity_reduction.py b/tests/tools/test_temporal_complexity_reduction.py index 462e4c15..0fcbc735 100644 --- a/tests/tools/test_temporal_complexity_reduction.py +++ b/tests/tools/test_temporal_complexity_reduction.py @@ -7,7 +7,7 @@ class TestTemporalComplexityReduction: - @classmethod + @pytest.fixture(autouse=True) def setup_class(self): self.edisgo = EDisGo(ding0_grid=pytest.ding0_test_network_path) self.edisgo.set_time_series_worst_case_analysis() @@ -32,32 +32,75 @@ def setup_class(self): self.edisgo.analyze() def test__scored_most_critical_loading(self): - ts_crit = temp_red._scored_most_critical_loading(self.edisgo) - + ts_crit = temp_red._scored_most_critical_loading( + self.edisgo, weight_by_costs=False + ) assert len(ts_crit) == 180 assert np.isclose(ts_crit.iloc[0], 1.45613) assert np.isclose(ts_crit.iloc[-1], 1.14647) - def test__scored_most_critical_voltage_issues(self): - ts_crit = temp_red._scored_most_critical_voltage_issues(self.edisgo) + ts_crit = temp_red._scored_most_critical_loading(self.edisgo) + assert len(ts_crit) == 180 + assert np.isclose(ts_crit.iloc[0], 190.63611) + assert np.isclose(ts_crit.iloc[-1], 48.13501) + + def test__scored_most_critical_voltage_issues(self): + ts_crit = temp_red._scored_most_critical_voltage_issues( + self.edisgo, weight_by_costs=False + ) assert len(ts_crit) == 120 assert np.isclose(ts_crit.iloc[0], 0.01062258) assert np.isclose(ts_crit.iloc[-1], 0.01062258) + ts_crit = temp_red._scored_most_critical_voltage_issues(self.edisgo) + assert len(ts_crit) == 120 + assert np.isclose(ts_crit.iloc[0], 0.1062258) + assert np.isclose(ts_crit.iloc[-1], 0.1062258) + def test_get_most_critical_time_steps(self): ts_crit = temp_red.get_most_critical_time_steps( - self.edisgo, num_steps_loading=2, num_steps_voltage=2 + self.edisgo, + num_steps_loading=2, 
+ num_steps_voltage=2, + weight_by_costs=False, + run_initial_analyze=False, ) assert len(ts_crit) == 3 + ts_crit = temp_red.get_most_critical_time_steps( + self.edisgo, + num_steps_loading=2, + num_steps_voltage=2, + timesteps=self.edisgo.timeseries.timeindex[:24], + ) + assert len(ts_crit) == 2 + + ts_crit = temp_red.get_most_critical_time_steps( + self.edisgo, + mode="lv", + lv_grid_id=2, + percentage=0.5, + num_steps_voltage=2, + ) + assert len(ts_crit) == 0 + + ts_crit = temp_red.get_most_critical_time_steps( + self.edisgo, + mode="lv", + lv_grid_id=6, + percentage=0.5, + num_steps_voltage=2, + ) + assert len(ts_crit) == 60 + def test__scored_most_critical_loading_time_interval(self): # test with default values ts_crit = temp_red._scored_most_critical_loading_time_interval(self.edisgo, 24) - assert len(ts_crit) == 9 + assert len(ts_crit) == 10 assert ( ts_crit.loc[0, "time_steps"] - == pd.date_range("1/5/2018", periods=24, freq="H") + == pd.date_range("1/8/2018", periods=24, freq="H") ).all() assert np.isclose( ts_crit.loc[0, "percentage_max_overloaded_components"], 0.96479 @@ -77,36 +120,66 @@ def test__scored_most_critical_loading_time_interval(self): ).all() assert ts_crit.loc[0, "percentage_max_overloaded_components"] == 1 + # test without weighting by costs + ts_crit = temp_red._scored_most_critical_loading_time_interval( + self.edisgo, + 48, + weight_by_costs=False, + ) + assert len(ts_crit) == 9 + assert ( + ts_crit.loc[0, "time_steps"] + == pd.date_range("1/5/2018 0:00", periods=48, freq="H") + ).all() + def test__scored_most_critical_voltage_issues_time_interval(self): # test with default values ts_crit = temp_red._scored_most_critical_voltage_issues_time_interval( self.edisgo, 24 ) - assert len(ts_crit) == 9 + assert len(ts_crit) == 5 assert ( ts_crit.loc[0, "time_steps"] == pd.date_range("1/1/2018", periods=24, freq="H") ).all() - assert np.isclose(ts_crit.loc[0, "percentage_buses_max_voltage_deviation"], 1.0) - assert np.isclose(ts_crit.loc[1, 
"percentage_buses_max_voltage_deviation"], 1.0) + assert ( + ts_crit.loc[:, "percentage_buses_max_voltage_deviation"].values == 1.0 + ).all() # test with non-default values ts_crit = temp_red._scored_most_critical_voltage_issues_time_interval( - self.edisgo, 24, time_step_day_start=4, voltage_deviation_factor=0.5 + self.edisgo, 72, time_step_day_start=4, weight_by_costs=False ) - assert len(ts_crit) == 9 + assert len(ts_crit) == 5 assert ( ts_crit.loc[0, "time_steps"] - == pd.date_range("1/1/2018 4:00", periods=24, freq="H") + == pd.date_range("1/1/2018 4:00", periods=72, freq="H") ).all() - assert np.isclose(ts_crit.loc[0, "percentage_buses_max_voltage_deviation"], 1.0) + + def test__costs_per_line_and_transformer(self): + costs = temp_red._costs_per_line_and_transformer(self.edisgo) + assert len(costs) == 131 + 11 + assert np.isclose(costs["Line_10007"], 0.722445826838636 * 80) + assert np.isclose(costs["LVGrid_1_station"], 10) + + def test__costs_per_feeder(self): + costs = temp_red._costs_per_feeder(self.edisgo) + assert len(costs) == 37 + assert np.isclose(costs["Bus_BranchTee_MVGrid_1_1"], 295.34795) + assert np.isclose(costs["BusBar_MVGrid_1_LVGrid_1_LV"], 10) def test_get_most_critical_time_intervals(self): - self.edisgo.timeseries.timeindex = self.edisgo.timeseries.timeindex[:25] - self.edisgo.timeseries.scale_timeseries(p_scaling_factor=5, q_scaling_factor=5) + self.edisgo.timeseries.scale_timeseries(p_scaling_factor=2, q_scaling_factor=2) steps = temp_red.get_most_critical_time_intervals( - self.edisgo, time_steps_per_time_interval=24 + self.edisgo, time_steps_per_time_interval=24, percentage=0.5 ) - assert len(steps) == 1 - assert len(steps.columns) == 4 + assert len(steps) == 5 + assert ( + steps.loc[0, "time_steps_overloading"] + == pd.date_range("1/8/2018", periods=24, freq="H") + ).all() + assert ( + steps.loc[0, "time_steps_voltage_issues"] + == pd.date_range("1/1/2018", periods=24, freq="H") + ).all() diff --git a/tests/tools/test_tools.py 
b/tests/tools/test_tools.py index 606c60d8..66216ca6 100644 --- a/tests/tools/test_tools.py +++ b/tests/tools/test_tools.py @@ -30,6 +30,184 @@ def test_calculate_line_reactance(self): data = tools.calculate_line_reactance(np.array([2, 3]), 3, 2) assert_allclose(data, np.array([1.88496 / 2, 2.82743 / 2]), rtol=1e-5) + def test_calculate_voltage_diff_pu_per_line(self): + correct_value_positive_sign = 0.03261946832784687 + correct_value_negative_sign = 0.06008053167215312 + r_total = 0.412 + x_total = 0.252 + + # test generator, float + data = tools.calculate_voltage_diff_pu_per_line( + s_max=50, + r_total=r_total, + x_total=x_total, + v_nom=20, + q_sign=-1, + power_factor=0.9, + ) + assert np.isclose(data, correct_value_positive_sign) + # test generator, array + data = tools.calculate_voltage_diff_pu_per_line( + s_max=np.array([50, 50]), + r_total=np.array([r_total, r_total]), + x_total=np.array([x_total, x_total]), + v_nom=20, + q_sign=-1, + power_factor=0.9, + ) + assert_allclose( + data, + np.array([correct_value_positive_sign, correct_value_positive_sign]), + rtol=1e-5, + ) + # test generator, float, higher voltage + data = tools.calculate_voltage_diff_pu_per_line( + s_max=50, + r_total=r_total, + x_total=x_total, + v_nom=40, + q_sign=-1, + power_factor=0.9, + ) + assert np.isclose(data, correct_value_positive_sign / 4) + # test generator, array, larger cable + data = tools.calculate_voltage_diff_pu_per_line( + s_max=np.array([100, 100]), + r_total=np.array([r_total, r_total]), + x_total=np.array([x_total, x_total]), + v_nom=np.array([20, 20]), + q_sign=-1, + power_factor=0.9, + ) + assert_allclose( + data, + np.array( + [correct_value_positive_sign * 2, correct_value_positive_sign * 2] + ), + rtol=1e-5, + ) + # test generator, capacitive + data = tools.calculate_voltage_diff_pu_per_line( + s_max=100, + r_total=r_total, + x_total=x_total, + v_nom=20, + q_sign=1, + power_factor=0.9, + ) + assert np.isclose(data, correct_value_negative_sign * 2) + # test load, 
capacitive + data = tools.calculate_voltage_diff_pu_per_line( + s_max=100, + r_total=r_total, + x_total=x_total, + v_nom=20, + q_sign=-1, + power_factor=0.9, + ) + assert np.isclose(data, correct_value_positive_sign * 2) + + # test the examples from VDE-AR-N 4105 attachment D + data = tools.calculate_voltage_diff_pu_per_line( + s_max=0.02, + r_total=0.2001, + x_total=0.1258, + v_nom=0.4, + q_sign=-1, + power_factor=1, + ) + assert np.isclose(data, 0.025, rtol=1e-2) + + data = tools.calculate_voltage_diff_pu_per_line( + s_max=0.022, + r_total=0.2001, + x_total=0.1258, + v_nom=0.4, + q_sign=-1, + power_factor=0.9, + ) + assert np.isclose(data, 0.0173, rtol=1e-2) + + def test_calculate_voltage_diff_pu_per_line_from_type(self): + correct_value_negative_sign = 0.4916578234319946 * 1e-2 + correct_value_positive_sign = 0.017583421765680056 + data = tools.calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj=self.edisgo, + cable_names="NA2XS(FL)2Y 3x1x300 RM/25", + length=1, + num_parallel=1, + v_nom=20, + s_max=50, + component_type="generator", + ) + assert np.isclose(data, correct_value_negative_sign) + data = tools.calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj=self.edisgo, + cable_names=np.array( + ["NA2XS(FL)2Y 3x1x300 RM/25", "NA2XS(FL)2Y 3x1x300 RM/25"] + ), + length=1, + num_parallel=1, + v_nom=20, + s_max=50, + component_type="generator", + ) + assert_allclose( + data, + np.array([correct_value_negative_sign, correct_value_negative_sign]), + rtol=1e-5, + ) + data = tools.calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj=self.edisgo, + cable_names="NA2XS(FL)2Y 3x1x300 RM/25", + length=2, + num_parallel=1, + v_nom=20, + s_max=50, + component_type="generator", + ) + assert np.isclose(data, 2 * correct_value_negative_sign) + data = tools.calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj=self.edisgo, + cable_names=np.array( + ["NA2XS(FL)2Y 3x1x300 RM/25", "NA2XS(FL)2Y 3x1x300 RM/25"] + ), + length=2, + num_parallel=1, + v_nom=20, + 
s_max=50, + component_type="generator", + ) + assert_allclose( + data, + np.array( + [2 * correct_value_negative_sign, 2 * correct_value_negative_sign] + ), + rtol=1e-5, + ) + + data = tools.calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj=self.edisgo, + cable_names="NA2XS(FL)2Y 3x1x300 RM/25", + length=1, + num_parallel=2, + v_nom=20, + s_max=50, + component_type="generator", + ) + assert np.isclose(data, correct_value_negative_sign / 2) + + data = tools.calculate_voltage_diff_pu_per_line_from_type( + edisgo_obj=self.edisgo, + cable_names="NA2XS(FL)2Y 3x1x300 RM/25", + length=1, + num_parallel=2, + v_nom=20, + s_max=50, + component_type="conventional_load", + ) + assert np.isclose(data, correct_value_positive_sign / 2) + def test_calculate_line_resistance(self): # test single line data = tools.calculate_line_resistance(2, 3, 1) @@ -97,18 +275,103 @@ def test_drop_duplicated_columns(self): assert (check_df.loc[:, "a"] == [4, 5, 6]).all() def test_select_cable(self): - cable_data, num_parallel_cables = tools.select_cable(self.edisgo, "mv", 5.1) + # no length given + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, + "mv", + 5.1, + ) assert cable_data.name == "NA2XS2Y 3x1x150 RE/25" assert num_parallel_cables == 1 - cable_data, num_parallel_cables = tools.select_cable(self.edisgo, "mv", 40) + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, + "mv", + 40, + ) assert cable_data.name == "NA2XS(FL)2Y 3x1x500 RM/35" assert num_parallel_cables == 2 - cable_data, num_parallel_cables = tools.select_cable(self.edisgo, "lv", 0.18) + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, + "lv", + 0.18, + ) assert cable_data.name == "NAYY 4x1x150" assert num_parallel_cables == 1 + # length given + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, + "mv", + 5.1, + length=2, + component_type="conventional_load", + ) + assert cable_data.name == "NA2XS2Y 3x1x150 RE/25" + assert num_parallel_cables == 
1 + + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, + "mv", + 40, + length=1, + component_type="conventional_load", + ) + assert cable_data.name == "NA2XS(FL)2Y 3x1x500 RM/35" + assert num_parallel_cables == 2 + + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, + "lv", + 0.18, + length=1, + component_type="conventional_load", + ) + assert cable_data.name == "NAYY 4x1x300" + assert num_parallel_cables == 5 + + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, + "lv", + 0.18, + length=1, + max_voltage_diff=0.01, + max_cables=100, + component_type="conventional_load", + ) + assert cable_data.name == "NAYY 4x1x300" + assert num_parallel_cables == 14 + + cable_data, num_parallel_cables = tools.select_cable( + self.edisgo, + "lv", + 0.18, + length=1, + max_voltage_diff=0.01, + max_cables=100, + component_type="generator", + ) + assert cable_data.name == "NAYY 4x1x300" + assert num_parallel_cables == 8 + + try: + tools.select_cable( + self.edisgo, + "lv", + 0.18, + length=1, + max_voltage_diff=0.01, + max_cables=100, + component_type="fail", + ) + except ValueError as e: + assert ( + str(e) == "Specified component type is not valid. " + "Must either be 'generator', 'conventional_load', 'charging_point', " + "'heat_pump' or 'storage_unit'." + ) + def test_get_downstream_buses(self): # ######## test with LV bus ######## buses_downstream = tools.get_downstream_buses( @@ -168,6 +431,7 @@ def test_determine_bus_voltage_level(self): assert tools.determine_bus_voltage_level(self.edisgo, bus_voltage_level_6) == 6 assert tools.determine_bus_voltage_level(self.edisgo, bus_voltage_level_7) == 7 + @pytest.mark.oedbtest def test_get_weather_cells_intersecting_with_grid_district(self): weather_cells = tools.get_weather_cells_intersecting_with_grid_district( self.edisgo