Update load processing (#211)
* build_load_data

* Add documentation

* updating load data import

* Update Config files

* Update load.csv

* Update add_electricity.py

* change log file name

* Update scripts/add_electricity.py

Co-authored-by: FabianHofmann <hofmann@fias.uni-frankfurt.de>

* Update scripts/build_load_data.py

Co-authored-by: FabianHofmann <hofmann@fias.uni-frankfurt.de>

* Update scripts/build_load_data.py

Co-authored-by: FabianHofmann <hofmann@fias.uni-frankfurt.de>

* Update scripts/build_load_data.py

Co-authored-by: FabianHofmann <hofmann@fias.uni-frankfurt.de>

* Update build_load_data.py

* Update build_load_data.py

* Update scripts/build_load_data.py

Co-authored-by: FabianHofmann <hofmann@fias.uni-frankfurt.de>

* update gap handling in build_load_data

* Update build_load_data.py

* Update config.test1.yaml

* update test.config

* Update config.tutorial.yaml

* update load csv function for load data

* Update build_load_data.py

* Update config.test1.yaml

* Update add_electricity.py

* Update build_load_data.py

* Added error messages if load data contains gaps after modifications

* general adjustments:
	- reduce data source to only entsoe statistics
	- sanitize code
	- adjust logging messages
	- adjust docstrings

* update Snakefile config and docs

* update release notes
rename build_load -> build_load_data in config

* small follow up

* - reintroduce choice between powerstatistics and transparency
- remove load_ timeseries from databundle
- always build load_data
- reinsert scaling factor in config
- fix url to 2019 version

* update doc: configtable, release notes
update config.yaml

* follow up

Co-authored-by: Jan Frederick <jan.frederick.unnewehr@inatech.uni-freiburg.de>
Co-authored-by: JanFrederickUnnewehr <50404069+JanFrederickUnnewehr@users.noreply.github.com>
3 people authored Dec 3, 2020
1 parent f18b7b0 commit 9792069
Showing 11 changed files with 281 additions and 25 deletions.
10 changes: 7 additions & 3 deletions Snakefile
@@ -53,8 +53,7 @@ datafiles = ['ch_cantons.csv', 'je-e-21.03.02.xls',
'eez/World_EEZ_v8_2014.shp', 'EIA_hydro_generation_2000_2014.csv',
'hydro_capacities.csv', 'naturalearth/ne_10m_admin_0_countries.shp',
'NUTS_2013_60M_SH/data/NUTS_RG_60M_2013.shp', 'nama_10r_3popgdp.tsv.gz',
'nama_10r_3gdp.tsv.gz', 'time_series_60min_singleindex_filtered.csv',
'corine/g250_clc06_V18_5.tif']
'nama_10r_3gdp.tsv.gz', 'corine/g250_clc06_V18_5.tif']

if not config.get('tutorial', False):
datafiles.extend(["natura/Natura2000_end2015.shp", "GEBCO_2014_2D.nc"])
@@ -65,6 +64,11 @@ if config['enable'].get('retrieve_databundle', True):
log: "logs/retrieve_databundle.log"
script: 'scripts/retrieve_databundle.py'

rule build_load_data:
output: "resources/load.csv"
log: "logs/build_load_data.log"
script: 'scripts/build_load_data.py'

rule build_powerplants:
input:
base_network="networks/base.nc",
@@ -204,7 +208,7 @@ rule add_electricity:
powerplants='resources/powerplants.csv',
hydro_capacities='data/bundle/hydro_capacities.csv',
geth_hydro_capacities='data/geth2015_hydro_capacities.csv',
opsd_load='data/bundle/time_series_60min_singleindex_filtered.csv',
load='resources/load.csv',
nuts3_shapes='resources/nuts3_shapes.geojson',
**{'profile_' + t: "resources/profile_" + t + ".nc"
for t in config['renewable']}
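With this change ``resources/load.csv`` becomes a regular build artefact: ``add_electricity`` now reads the output of the new ``build_load_data`` rule instead of the raw OPSD file from the data bundle. The script behind the rule is not shown on this page; the sketch below only illustrates the usual Snakemake script contract: the ``snakemake`` object is injected by the ``script:`` directive, and the actual processing is elided.

import logging
import pandas as pd

# Sketch only -- scripts/build_load_data.py itself is not reproduced in this commit view.
# Snakemake's ``script:`` directive injects a global ``snakemake`` object carrying the
# config, log and output paths declared in the rule above.
logging.basicConfig(filename=snakemake.log[0], level=logging.INFO)

cfg = snakemake.config['load']                     # the new 'load:' config section
raw = pd.read_csv(cfg['url'], index_col=0, parse_dates=[0])
# ... gap handling, optional manual adjustments and country filtering go here ...
raw.to_csv(snakemake.output[0])                    # -> resources/load.csv

The rule can also be requested on its own, for instance with ``snakemake -j1 resources/load.csv``.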
5 changes: 5 additions & 0 deletions config.default.yaml
@@ -168,6 +168,11 @@ transformers:
type: ''

load:
url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv
power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data
interpolate_limit: 3 # data gaps up until this size are interpolated linearly
time_shift_for_large_gaps: 1w # data gaps up until this size are filled by copying the time slice from this period before/after the gap
manual_adjustments: true # false
scaling_factor: 1.0

costs:
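Taken together, ``interpolate_limit`` and ``time_shift_for_large_gaps`` describe a two-stage gap treatment: short runs of missing values are interpolated linearly, while larger gaps are patched by copying a time slice shifted by the configured period. The implementation lives in ``scripts/build_load_data.py``, which is not shown here; the pandas sketch below, with made-up helper names and toy data, only illustrates the idea.

import numpy as np
import pandas as pd

def interpolate_short_gaps(s, limit=3):
    # interpolate only runs of at most `limit` consecutive NaNs
    isna = s.isna()
    gap_id = (~isna).cumsum()[isna]               # label each NaN run
    gap_len = gap_id.map(gap_id.value_counts())   # length of the run each NaN sits in
    out = s.copy()
    short = gap_len[gap_len <= limit].index
    out[short] = s.interpolate(method='linear')[short]
    return out

def fill_large_gaps(s, shift='1w'):
    # fill whatever is still missing with the value from `shift` earlier
    return s.fillna(s.shift(freq=pd.Timedelta(shift)))

idx = pd.date_range('2013-01-01', periods=24 * 21, freq='h')
load = pd.Series(np.sin(np.arange(len(idx))), index=idx)
load.iloc[2:4] = np.nan       # short gap -> interpolated
load.iloc[200:260] = np.nan   # 60 h gap  -> copied from one week earlier
load = fill_large_gaps(interpolate_short_gaps(load, limit=3), shift='1w')
assert not load.isna().any()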
5 changes: 5 additions & 0 deletions config.tutorial.yaml
@@ -146,6 +146,11 @@ transformers:
type: ''

load:
url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv
power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data
interpolate_limit: 3 # data gaps up until this size are interpolated linearly
time_shift_for_large_gaps: 1w # data gaps up until this size are filled by copying the time slice from this period before/after the gap
manual_adjustments: true # false
scaling_factor: 1.0

costs:
7 changes: 6 additions & 1 deletion doc/configtables/load.csv
@@ -1,2 +1,7 @@
,Unit,Values,Description
scaling_factor,--,float,"Global correction factor for the load time series."
url,--,string,"Link to open power system data time series data."
power_statistics,bool,"{true, false}",Whether to load the electricity consumption data of the ENTSOE power statistics (only for files from 2019 and before) or from the ENTSOE transparency data (only has load data from 2015 onwards).
interpolate_limit,hours,integer,"Maximum gap size (consecutive NaNs) which is interpolated linearly."
time_shift_for_large_gaps,string,string,"Periods which are used for copying time-slices in order to fill large gaps of nans. Have to be valid ``pandas`` period strings."
manual_adjustments,bool,"{true, false}","Whether to adjust the load data manually according to the function in :func:`manual_adjustment`."
scaling_factor,--,float,"Global correction factor for the load time series."
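For reference, the ``pandas`` period strings expected by ``time_shift_for_large_gaps`` are ordinary offset strings that pandas can parse, for example:

import pandas as pd

pd.Timedelta('1w')    # Timedelta('7 days 00:00:00'), the default used in the configs above
pd.Timedelta('3d')    # Timedelta('3 days 00:00:00')
pd.Timedelta('12h')   # Timedelta('0 days 12:00:00')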
10 changes: 5 additions & 5 deletions doc/configuration.rst
@@ -218,7 +218,7 @@ Specifies the temporal range to build an energy system model for as arguments to

.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 170-171
:lines: 170-174

.. csv-table::
:header-rows: 1
@@ -232,7 +232,7 @@ Specifies the temporal range to build an energy system model for as arguments to

.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 173-185
:lines: 175-188

.. csv-table::
:header-rows: 1
@@ -254,7 +254,7 @@ Specifies the temporal range to build an energy system model for as arguments to

.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 187-197
:lines: 190-200

.. csv-table::
:header-rows: 1
@@ -266,7 +266,7 @@ Specifies the temporal range to build an energy system model for as arguments to

.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 187,198-214
:lines: 190,201-217

.. csv-table::
:header-rows: 1
@@ -280,7 +280,7 @@ Specifies the temporal range to build an energy system model for as arguments to

.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 216-355
:lines: 219-358

.. csv-table::
:header-rows: 1
1 change: 1 addition & 0 deletions doc/preparation.rst
@@ -39,6 +39,7 @@ together into a detailed PyPSA network stored in ``networks/elec.nc``.

preparation/retrieve
preparation/build_shapes
preparation/build_load_data
preparation/build_cutout
preparation/build_natura_raster
preparation/prepare_links_p_nom
12 changes: 12 additions & 0 deletions doc/preparation/build_load_data.rst
@@ -0,0 +1,12 @@
..
SPDX-FileCopyrightText: 2020-2021 The PyPSA-Eur Authors
SPDX-License-Identifier: CC-BY-4.0

.. _load_data:

Rule ``build_load_data``
=============================


.. automodule:: build_load_data
3 changes: 3 additions & 0 deletions doc/release_notes.rst
@@ -49,6 +49,9 @@ Upcoming Release

* Modelling hydrogen and battery storage with Store and Link components is now the default, rather than using StorageUnit components with fixed power-to-energy ratio (`#205 <https://github.com/PyPSA/pypsa-eur/pull/205>`_).

* Electricity consumption data is now directly retrieved from the `OPSD website <https://data.open-power-system-data.org/time_series/2019-06-05>`_ using the rule ``build_load_data``. The user can decide whether to take the ENTSOE power statistics data (default) or the ENTSOE transparency data.



PyPSA-Eur 0.2.0 (8th June 2020)
==================================
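The choice between the two sources mentioned in the release note above amounts to picking different column families from the OPSD file. Assuming the usual OPSD column naming of ``<country>_load_actual_entsoe_power_statistics`` and ``<country>_load_actual_entsoe_transparency`` (an assumption, since the column list is not reproduced in this commit), a selection along these lines would do it:

import pandas as pd

def select_load_source(opsd_csv, power_statistics=True, countries=('DE', 'FR', 'PL')):
    # pick either the ENTSO-E power statistics or the transparency columns
    suffix = ('_load_actual_entsoe_power_statistics' if power_statistics
              else '_load_actual_entsoe_transparency')
    raw = pd.read_csv(opsd_csv, index_col=0, parse_dates=[0])
    cols = [c for c in raw.columns if c.endswith(suffix) and c.split('_')[0] in countries]
    return raw[cols].rename(columns=lambda c: c.replace(suffix, ''))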
24 changes: 8 additions & 16 deletions scripts/add_electricity.py
@@ -53,14 +53,9 @@
:scale: 34 %
- ``data/geth2015_hydro_capacities.csv``: alternative to capacities above; NOT CURRENTLY USED!
- ``data/bundle/time_series_60min_singleindex_filtered.csv``: Hourly per-country load profiles since 2010 from the `ENTSO-E statistical database <https://www.entsoe.eu/data/power-stats/hourly_load/>`_
.. image:: ../img/load-box.png
:scale: 33 %
.. image:: ../img/load-ts.png
:scale: 33 %
- ``resources/opsd_load.csv`` Hourly per-country load profiles.
- ``resources/regions_onshore.geojson``: confer :ref:`busregions`
- ``resources/nuts3_shapes.geojson``: confer :ref:`shapes`
- ``resources/powerplants.csv``: confer :ref:`powerplants`
@@ -91,7 +86,6 @@
"""

from vresutils.costdata import annuity
from vresutils.load import timeseries_opsd
from vresutils import transfer as vtransfer

import logging
@@ -200,7 +194,6 @@ def load_powerplants(ppl_fn=None):
.rename(columns=str.lower).drop(columns=['efficiency'])
.replace({'carrier': carrier_dict}))


# =============================================================================
# Attach components
# =============================================================================
@@ -211,17 +204,15 @@ def attach_load(n):
substation_lv_i = n.buses.index[n.buses['substation_lv']]
regions = (gpd.read_file(snakemake.input.regions).set_index('name')
.reindex(substation_lv_i))
opsd_load = (timeseries_opsd(slice(*n.snapshots[[0,-1]].year.astype(str)),
snakemake.input.opsd_load) *
snakemake.config.get('load', {}).get('scaling_factor', 1.0))
opsd_load = (pd.read_csv(snakemake.input.load, index_col=0, parse_dates=True)
.filter(items=snakemake.config['countries']))

# Convert to naive UTC (has to be explicit since pandas 0.24)
opsd_load.index = opsd_load.index.tz_localize(None)
scaling = snakemake.config.get('load', {}).get('scaling_factor', 1.0)
logger.info(f"Load data scaled with scaling factor {scaling}.")
opsd_load *= scaling

nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index('index')

def normed(x): return x.divide(x.sum())

def upsample(cntry, group):
l = opsd_load[cntry]
if len(group) == 1:
@@ -236,7 +227,8 @@ def upsample(cntry, group):
index=group.index)

# relative factors 0.6 and 0.4 have been determined from a linear
# regression on the country to continent load data (refer to vresutils.load._upsampling_weights)
# regression on the country to continent load data
# (refer to vresutils.load._upsampling_weights)
factors = normed(0.6 * normed(gdp_n) + 0.4 * normed(pop_n))
return pd.DataFrame(factors.values * l.values[:,np.newaxis],
index=l.index, columns=factors.index)
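The 0.6/0.4 blend above disaggregates each country's hourly load onto its NUTS3 regions in proportion to regional GDP and population. A small self-contained example of the same arithmetic, with toy numbers rather than values from the data bundle:

import numpy as np
import pandas as pd

def normed(x):
    return x.divide(x.sum())

# toy GDP and population for one country with three NUTS3 regions
gdp = pd.Series([30.0, 50.0, 20.0], index=['reg_a', 'reg_b', 'reg_c'])
pop = pd.Series([1.0, 1.0, 2.0],    index=['reg_a', 'reg_b', 'reg_c'])

factors = normed(0.6 * normed(gdp) + 0.4 * normed(pop))
# -> reg_a 0.28, reg_b 0.40, reg_c 0.32  (sums to one)

country_load = pd.Series([10.0, 12.0],
                         index=pd.date_range('2013-01-01', periods=2, freq='h'))
regional_load = pd.DataFrame(country_load.values[:, np.newaxis] * factors.values,
                             index=country_load.index, columns=factors.index)
# each row of regional_load sums to the corresponding country-level value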
