diff --git a/Snakefile b/Snakefile
index 95e79502..47856717 100644
--- a/Snakefile
+++ b/Snakefile
@@ -23,19 +23,21 @@ SDIR = config['summary_dir'] + '/' + config['run']
 RDIR = config['results_dir'] + config['run']
 
+
 subworkflow pypsaeur:
     workdir: "../pypsa-eur"
     snakefile: "../pypsa-eur/Snakefile"
     configfile: "../pypsa-eur/config.yaml"
 
-rule all:
-    input: SDIR + '/graphs/costs.pdf'
+if config["foresight"] in ["myopic", "overnight"]:
+    rule all:
+        input: SDIR + '/graphs/costs.pdf'
 
-rule solve_all_networks:
-    input:
-        expand(RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
-               **config['scenario'])
+    rule solve_all_networks:
+        input:
+            expand(RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
+                   **config['scenario'])
 
 
 rule prepare_sector_networks:
@@ -491,7 +493,7 @@ rule prepare_sector_network:
         co2="data/eea/UNFCCC_v23.csv",
         biomass_potentials='resources/biomass_potentials_s{simpl}_{clusters}.csv',
         heat_profile="data/heat_load_profile_BDEW.csv",
-        costs="data/costs_{}.csv".format(config['costs']['year']) if config["foresight"] == "overnight" else "data/costs_{planning_horizons}.csv",
+        costs= "data/costs_{}.csv".format(config['costs']['year']) if config["foresight"] == "overnight" else "data/costs_{planning_horizons}.csv",
         profile_offwind_ac=pypsaeur("resources/profile_offwind-ac.nc"),
         profile_offwind_dc=pypsaeur("resources/profile_offwind-dc.nc"),
         h2_cavern="resources/salt_cavern_potentials_s{simpl}_{clusters}.csv",
@@ -528,18 +530,18 @@ rule prepare_sector_network:
     script: "scripts/prepare_sector_network.py"
 
 
-rule plot_network:
-    input:
-        overrides="data/override_component_attrs",
-        network=RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
-        regions=pypsaeur('resources/regions_onshore_elec_s{simpl}_{clusters}.geojson')
-    output:
-        map=RDIR + "/maps/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf",
-        today=RDIR + "/maps/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}-today.pdf"
-    threads: 2
-    resources: mem_mb=10000
-    benchmark: RDIR + "/benchmarks/plot_network/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}"
-    script: "scripts/plot_network.py"
+if config["foresight"] in ["myopic", "overnight"]:
+    rule plot_network:
+        input:
+            overrides="data/override_component_attrs",
+            network=RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc"
+        output:
+            map=RDIR + "/maps/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf",
+            today=RDIR + "/maps/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}-today.pdf"
+        threads: 2
+        resources: mem_mb=10000
+        benchmark: RDIR + "/benchmarks/plot_network/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}"
+        script: "scripts/plot_network.py"
 
 
 rule copy_config:
@@ -549,7 +551,6 @@
     benchmark: SDIR + "/benchmarks/copy_config"
     script: "scripts/copy_config.py"
 
-
 rule copy_conda_env:
     output: SDIR + '/configs/environment.yaml'
     threads: 1
@@ -557,56 +558,55 @@
     benchmark: SDIR + "/benchmarks/copy_conda_env"
     shell: "conda env export -f {output} --no-builds"
 
+if config["foresight"] in ["myopic", "overnight"]:
+    rule make_summary:
+        input:
+            overrides="data/override_component_attrs",
+            networks=expand(
+                RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
+                **config['scenario']
+            ),
+            costs="data/costs_{}.csv".format(config['scenario']['planning_horizons'][0]),
+            plots=expand(
+                RDIR + "/maps/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf",
+                **config['scenario']
+            )
+        output:
+            nodal_costs=SDIR + '/csvs/nodal_costs.csv',
+            nodal_capacities=SDIR + '/csvs/nodal_capacities.csv',
+            nodal_cfs=SDIR + '/csvs/nodal_cfs.csv',
+            cfs=SDIR + '/csvs/cfs.csv',
+            costs=SDIR + '/csvs/costs.csv',
+            capacities=SDIR + '/csvs/capacities.csv',
+            curtailment=SDIR + '/csvs/curtailment.csv',
+            energy=SDIR + '/csvs/energy.csv',
+            supply=SDIR + '/csvs/supply.csv',
+            supply_energy=SDIR + '/csvs/supply_energy.csv',
+            prices=SDIR + '/csvs/prices.csv',
+            weighted_prices=SDIR + '/csvs/weighted_prices.csv',
+            market_values=SDIR + '/csvs/market_values.csv',
+            price_statistics=SDIR + '/csvs/price_statistics.csv',
+            metrics=SDIR + '/csvs/metrics.csv'
+        threads: 2
+        resources: mem_mb=10000
+        benchmark: SDIR + "/benchmarks/make_summary"
+        script: "scripts/make_summary.py"
 
-rule make_summary:
-    input:
-        overrides="data/override_component_attrs",
-        networks=expand(
-            RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
-            **config['scenario']
-        ),
-        costs="data/costs_{}.csv".format(config['costs']['year']) if config["foresight"] == "overnight" else "data/costs_{}.csv".format(config['scenario']['planning_horizons'][0]),
-        plots=expand(
-            RDIR + "/maps/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf",
-            **config['scenario']
-        )
-    output:
-        nodal_costs=SDIR + '/csvs/nodal_costs.csv',
-        nodal_capacities=SDIR + '/csvs/nodal_capacities.csv',
-        nodal_cfs=SDIR + '/csvs/nodal_cfs.csv',
-        cfs=SDIR + '/csvs/cfs.csv',
-        costs=SDIR + '/csvs/costs.csv',
-        capacities=SDIR + '/csvs/capacities.csv',
-        curtailment=SDIR + '/csvs/curtailment.csv',
-        energy=SDIR + '/csvs/energy.csv',
-        supply=SDIR + '/csvs/supply.csv',
-        supply_energy=SDIR + '/csvs/supply_energy.csv',
-        prices=SDIR + '/csvs/prices.csv',
-        weighted_prices=SDIR + '/csvs/weighted_prices.csv',
-        market_values=SDIR + '/csvs/market_values.csv',
-        price_statistics=SDIR + '/csvs/price_statistics.csv',
-        metrics=SDIR + '/csvs/metrics.csv'
-    threads: 2
-    resources: mem_mb=10000
-    benchmark: SDIR + "/benchmarks/make_summary"
-    script: "scripts/make_summary.py"
-
-
-rule plot_summary:
-    input:
-        costs=SDIR + '/csvs/costs.csv',
-        energy=SDIR + '/csvs/energy.csv',
-        balances=SDIR + '/csvs/supply_energy.csv',
-        eurostat=input_eurostat,
-        country_codes='data/Country_codes.csv',
-    output:
-        costs=SDIR + '/graphs/costs.pdf',
-        energy=SDIR + '/graphs/energy.pdf',
-        balances=SDIR + '/graphs/balances-energy.pdf'
-    threads: 2
-    resources: mem_mb=10000
-    benchmark: SDIR + "/benchmarks/plot_summary"
-    script: "scripts/plot_summary.py"
+    rule plot_summary:
+        input:
+            costs=SDIR + '/csvs/costs.csv',
+            energy=SDIR + '/csvs/energy.csv',
+            balances=SDIR + '/csvs/supply_energy.csv',
+            eurostat=input_eurostat,
+            country_codes='data/Country_codes.csv',
+        output:
+            costs=SDIR + '/graphs/costs.pdf',
+            energy=SDIR + '/graphs/energy.pdf',
+            balances=SDIR + '/graphs/balances-energy.pdf'
+        threads: 2
+        resources: mem_mb=10000
+        benchmark: SDIR + "/benchmarks/plot_summary"
+        script: "scripts/plot_summary.py"
 
 
 if config["foresight"] == "overnight":
config["foresight"] == "myopic": +if config["foresight"] in ["myopic","perfect"]: rule add_existing_baseyear: input: @@ -657,6 +657,9 @@ if config["foresight"] == "myopic": script: "scripts/add_existing_baseyear.py" +if config["foresight"] == "myopic": + + def solved_previous_horizon(wildcards): planning_horizons = config["scenario"]["planning_horizons"] i = planning_horizons.index(int(wildcards.planning_horizons)) @@ -681,7 +684,6 @@ if config["foresight"] == "myopic": ruleorder: add_existing_baseyear > add_brownfield - rule solve_network_myopic: input: overrides="data/override_component_attrs", @@ -698,3 +700,92 @@ if config["foresight"] == "myopic": resources: mem_mb=config['solving']['mem'] benchmark: RDIR + "/benchmarks/solve_network/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}" script: "scripts/solve_network.py" + +if config["foresight"] == "perfect": + rule prepare_perfect_foresight: + input: + **{f"network_{year}": RDIR + "/prenetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_" + f"{year}.nc" + for year in config['scenario']["planning_horizons"][1:]}, + brownfield_network = lambda w: (RDIR + "/prenetworks-brownfield/" + "elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_" + "{}.nc" + .format(str(config['scenario']["planning_horizons"][0]))), + overrides="data/override_component_attrs", + output: RDIR + "/prenetworks-brownfield/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years.nc" + threads: 2 + resources: mem_mb=10000 + script: "scripts/prepare_perfect_foresight.py" + + rule solve_network_perfect: + input: + overrides="data/override_component_attrs", + network= RDIR + "/prenetworks-brownfield/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years.nc", + costs="data/costs_2030.csv", + config=SDIR + '/configs/config.yaml' + output: RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years.nc" + shadow: "shallow" + log: + solver=RDIR + "/logs/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years_solver.log", + python=RDIR + "/logs/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years_python.log", + memory=RDIR + "/logs/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years_memory.log" + threads: 4 + resources: mem_mb=config['solving']['mem'] + benchmark: RDIR + "/benchmarks/solve_network/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years" + script: "scripts/solve_network.py" + + rule make_summary_perfect: + input: + **{f"networks_{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}": + RDIR + f"/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years.nc" + for simpl in config['scenario']['simpl'] + for clusters in config['scenario']['clusters'] + for opts in config['scenario']['opts'] + for sector_opts in config['scenario']['sector_opts'] + for lv in config['scenario']['lv']}, + costs="data/costs_2020.csv", + overrides="data/override_component_attrs", + output: + nodal_costs="results" + '/' + config['run'] + '/csvs/nodal_costs.csv', + nodal_capacities="results" + '/' + config['run'] + '/csvs/nodal_capacities.csv', + nodal_cfs="results" + '/' + config['run'] + '/csvs/nodal_cfs.csv', + cfs="results" + '/' + config['run'] + '/csvs/cfs.csv', + costs="results" + '/' + config['run'] + '/csvs/costs.csv', + capacities="results" + '/' + config['run'] + '/csvs/capacities.csv', + curtailment="results" + '/' + config['run'] + '/csvs/curtailment.csv', + 
capital_cost="results" + '/' + config['run'] + '/csvs/capital_cost.csv', + energy="results" + '/' + config['run'] + '/csvs/energy.csv', + supply="results" + '/' + config['run'] + '/csvs/supply.csv', + supply_energy="results" + '/' + config['run'] + '/csvs/supply_energy.csv', + prices="results" + '/' + config['run'] + '/csvs/prices.csv', + weighted_prices="results" + '/' + config['run'] + '/csvs/weighted_prices.csv', + market_values="results" + '/' + config['run'] + '/csvs/market_values.csv', + price_statistics="results" + '/' + config['run'] + '/csvs/price_statistics.csv', + metrics="results" + '/' + config['run'] + '/csvs/metrics.csv', + co2_emissions="results" + '/' + config['run'] + '/csvs/co2_emissions.csv', + threads: 2 + resources: mem_mb=10000 + script: + 'scripts/make_summary_perfect.py' + + rule plot_summary_perfect: + input: + costs_csv="results" + '/' + config['run'] + '/csvs/costs.csv', + **{f"costs_{year}": f"data/costs_{year}.csv" + for year in config['scenario']["planning_horizons"]}, + energy="results" + '/' + config['run'] + '/csvs/energy.csv', + balances="results" + '/' + config['run'] + '/csvs/supply_energy.csv', + eea ="data/eea/UNFCCC_v24.csv", + countries="results" + '/' + config['run'] + '/csvs/nodal_capacities.csv', + co2_emissions="results" + '/' + config['run'] + '/csvs/co2_emissions.csv', + capacities="results" + '/' + config['run'] + '/csvs/capacities.csv', + capital_cost="results" + '/' + config['run'] + '/csvs/capital_cost.csv', + output: + costs1="results" + '/' + config['run'] + '/graphs/costs.pdf', + costs2="results" + '/' + config['run'] + '/graphs/costs2.pdf', + costs3="results" + '/' + config['run'] + '/graphs/total_costs_per_year.pdf', + # energy="results" + '/' + config['run'] + '/graphs/energy.pdf', + balances="results" + '/' + config['run'] + '/graphs/balances-energy.pdf', + co2_emissions="results" + '/' + config['run'] + '/graphs/carbon_budget_plot.pdf', + capacities="results" + '/' + config['run'] + '/graphs/capacities.pdf', + threads: 2 + resources: mem_mb=10000 + script: + 'scripts/plot_summary_perfect.py' diff --git a/config.default.yaml b/config.default.yaml index e939f9c0..eaed47d4 100644 --- a/config.default.yaml +++ b/config.default.yaml @@ -60,6 +60,11 @@ co2_budget: 2040: 0.0712365591 2045: 0.0322580645 2050: 0 + # update of IPCC 6th AR compared to the 1.5SR. (discussed here: https://twitter.com/JoeriRogelj/status/1424743828339167233) + 1p5 : 34.2 # 25.7 # Budget in Gt CO2 for 1.5 for Europe, global 420 Gt, assuming per capita share + 1p6 : 43.259666 # 35 # Budget in Gt CO2 for 1.6 for Europe, global 580 Gt + 1p7 : 51.4 # 45 # Budget in Gt CO2 for 1.7 for Europe, global 800 Gt + 2p0 : 69.778 # 73.9 # Budget in Gt CO2 for 2 for Europe, global 1170 Gt # snapshots are originally set in PyPSA-Eur/config.yaml but used again by PyPSA-Eur-Sec snapshots: @@ -345,6 +350,7 @@ costs: discountrate: 0.07 # [EUR/USD] ECB: https://www.ecb.europa.eu/stats/exchange/eurofxref/html/eurofxref-graph-usd.en.html # noqa: E501 USD2013_to_EUR2013: 0.7532 + social_discountrate: 0.02 # Marginal and capital costs can be overwritten # capital_cost: diff --git a/scripts/make_summary.py b/scripts/make_summary.py index f9c74c89..48fa86e8 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -1,6 +1,5 @@ import sys -import yaml import pypsa import numpy as np @@ -409,8 +408,6 @@ def calculate_weighted_prices(n, label, weighted_prices): if carrier in ["H2", "gas"]: load = pd.DataFrame(index=n.snapshots, columns=buses, data=0.) 
diff --git a/scripts/make_summary.py b/scripts/make_summary.py
index f9c74c89..48fa86e8 100644
--- a/scripts/make_summary.py
+++ b/scripts/make_summary.py
@@ -1,6 +1,5 @@
 import sys
 
-import yaml
 import pypsa
 import numpy as np
 
@@ -409,8 +408,6 @@ def calculate_weighted_prices(n, label, weighted_prices):
 
         if carrier in ["H2", "gas"]:
             load = pd.DataFrame(index=n.snapshots, columns=buses, data=0.)
-        elif carrier[:5] == "space":
-            load = heat_demand_df[buses.str[:2]].rename(columns=lambda i: str(i)+suffix)
         else:
             load = n.loads_t.p_set[buses]
 
@@ -526,15 +523,29 @@ def make_summaries(networks_dict):
         "metrics",
     ]
 
-    columns = pd.MultiIndex.from_tuples(
-        networks_dict.keys(),
-        names=["cluster", "lv", "opt", "planning_horizon"]
-    )
+    network_paths = [key for key in snakemake.input.keys()
+                     if "networks" in key]
+
+    cols = pd.MultiIndex.from_tuples([key.split("_")
+                                      for key in network_paths])
+    cols = cols.droplevel([0, 1, 4])
+
+    networks_dict = {cols[i]: snakemake.input[network_path]
+                     for i, network_path in enumerate(network_paths)}
+
+    print(networks_dict)
+
+    # add planning horizons column
+    if snakemake.config["foresight"] == "perfect":
+        years = snakemake.config["scenario"]['planning_horizons']
+        cols = pd.MultiIndex.from_product([cols.levels[0], cols.levels[1],
+                                           cols.levels[2], years])
+
     df = {}
 
     for output in outputs:
-        df[output] = pd.DataFrame(columns=columns, dtype=float)
+        df[output] = pd.DataFrame(columns=cols, dtype=float)
 
     for label, filename in networks_dict.items():
         print(label, filename)
@@ -555,24 +566,13 @@ def to_csv(df):
     for key in df:
         df[key].to_csv(snakemake.output[key])
 
-
+#%%
 if __name__ == "__main__":
     if 'snakemake' not in globals():
         from helper import mock_snakemake
-        snakemake = mock_snakemake('make_summary')
-
-        networks_dict = {
-            (cluster, lv, opt+sector_opt, planning_horizon) :
-            snakemake.config['results_dir'] + snakemake.config['run'] + f'/postnetworks/elec_s{simpl}_{cluster}_lv{lv}_{opt}_{sector_opt}_{planning_horizon}.nc' \
-            for simpl in snakemake.config['scenario']['simpl'] \
-            for cluster in snakemake.config['scenario']['clusters'] \
-            for opt in snakemake.config['scenario']['opts'] \
-            for sector_opt in snakemake.config['scenario']['sector_opts'] \
-            for lv in snakemake.config['scenario']['lv'] \
-            for planning_horizon in snakemake.config['scenario']['planning_horizons']
-        }
+        snakemake = mock_snakemake('make_summary_perfect')
+
 
-        print(networks_dict)
 
     Nyears = 1
 
@@ -593,5 +593,3 @@ def to_csv(df):
     if snakemake.config["foresight"]=='myopic':
         cumulative_cost=calculate_cumulative_cost()
         cumulative_cost.to_csv(snakemake.config['summary_dir'] + '/' + snakemake.config['run'] + '/csvs/cumulative_cost.csv')
-
-
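The rewritten `make_summaries` no longer receives a prebuilt `networks_dict`; it derives the summary columns by splitting the snakemake input keys of the form `networks_{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}` on `_` and dropping the `networks`, `simpl` and `opts` levels. A sketch with a made-up key (note this only works because the wildcard values themselves contain no underscores):

```python
import pandas as pd

# Made-up input key: simpl and opts are empty, clusters=45, lv=1.0.
network_paths = ["networks__45_lv1.0__365H-T-H-B-I-A-solar+p3-dist1"]

cols = pd.MultiIndex.from_tuples([key.split("_") for key in network_paths])
cols = cols.droplevel([0, 1, 4])
print(cols)
# MultiIndex([('45', 'lv1.0', '365H-T-H-B-I-A-solar+p3-dist1')], )
```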
"generators") + Return: + pd.DataFrame with static values of missing assets + """ + df_final = getattr(n, c) + missing_i = df.index.difference(df_final.index) + return df.loc[missing_i] + + +def get_social_discount(t, r=0.01): + """Calculate for a given time t the social discount.""" + return 1 / (1 + r) ** t + + +def get_investment_weighting(time_weighting, r=0.01): + """Define cost weighting. + + Returns cost weightings depending on the the time_weighting (pd.Series) + and the social discountrate r + """ + end = time_weighting.cumsum() + start = time_weighting.cumsum().shift().fillna(0) + return pd.concat([start, end], axis=1).apply( + lambda x: sum([get_social_discount(t, r) for t in range(int(x[0]), int(x[1]))]), + axis=1, + ) + +# -------------------------------------------------------------------- +def concat_networks(years): + """Concat given pypsa networks and adds build_year. + + Return: + n : pypsa.Network for the whole planning horizon + + """ + + # input paths of sector coupling networks + network_paths = [snakemake.input.brownfield_network] + [ + snakemake.input[f"network_{year}"] for year in years[1:]] + # final concatenated network + overrides = override_component_attrs(snakemake.input.overrides) + n = pypsa.Network(override_component_attrs=overrides) + + + # iterate over single year networks and concat to perfect foresight network + for i, network_path in enumerate(network_paths): + year = years[i] + network = pypsa.Network(network_path, override_component_attrs=overrides) + network.lines["carrier"] = "AC" + add_build_year_to_new_assets(network, year) + + # static ---------------------------------- + # (1) add buses and carriers + for component in network.iterate_components(["Bus", "Carrier"]): + df_year = component.df + # get missing assets + missing = get_missing(df_year, n, component.list_name) + import_components_from_dataframe(n, missing, component.name) + # (2) add generators, links, stores and loads + for component in network.iterate_components( + ["Generator", "Link", "Store", "Load", "Line", "StorageUnit"] + ): + + df_year = component.df.copy() + missing = get_missing(df_year, n, component.list_name) + + import_components_from_dataframe(n, missing, component.name) + + # time variant -------------------------------------------------- + network_sns = pd.MultiIndex.from_product([[year], network.snapshots]) + snapshots = n.snapshots.drop("now", errors="ignore").union(network_sns) + n.set_snapshots(snapshots) + + for component in network.iterate_components(): + pnl = getattr(n, component.list_name + "_t") + for k in iterkeys(component.pnl): + pnl_year = component.pnl[k].copy().reindex(snapshots, level=1) + if pnl_year.empty and ~(component.name=="Load" and k=="p_set"): continue + if component.name == "Load": + static_load = network.loads.loc[network.loads.p_set != 0] + static_load_t = expand_series( + static_load.p_set, network_sns + ).T + pnl_year = pd.concat([pnl_year.reindex(network_sns), + static_load_t], axis=1) + columns = (pnl[k].columns.union(pnl_year.columns)).unique() + pnl[k] = pnl[k].reindex(columns=columns) + pnl[k].loc[pnl_year.index, pnl_year.columns] = pnl_year + + else: + # this is to avoid adding multiple times assets with infinit lifetime as ror + cols = pnl_year.columns.difference(pnl[k].columns) + pnl[k] = pd.concat([pnl[k], pnl_year[cols]], axis=1) + + + n.snapshot_weightings.loc[year,:] = network.snapshot_weightings.values + # set investment periods + n.investment_periods = n.snapshots.levels[0] + # weighting of the investment period -> assuming 
last period same weighting as the period before + time_w = n.investment_periods.to_series().diff().shift(-1).fillna(method="ffill") + n.investment_period_weightings["years"] = time_w + # set objective weightings + objective_w = get_investment_weighting(n.investment_period_weightings["years"], + social_discountrate) + n.investment_period_weightings["objective"] = objective_w + # all former static loads are now time-dependent -> set static = 0 + n.loads["p_set"] = 0 + + return n + +def adjust_stores(n): + """Make sure that stores still behave cyclic over one year and not whole + modelling horizon.""" + # cylclic constraint + cyclic_i = n.stores[n.stores.e_cyclic].index + n.stores.loc[cyclic_i, "e_cyclic_per_period"] = True + n.stores.loc[cyclic_i, "e_cyclic"] = False + # co2 store assumptions + co2_i = n.stores[n.stores.carrier.isin(["co2", "co2 stored"])].index + n.stores.loc[co2_i, "e_cyclic_per_period"] = False + + return n + +def set_phase_out(n, carrier, ct, phase_out_year): + """Set planned phase outs for given carrier,country (ct) and planned year + of phase out (phase_out_year).""" + df = n.links[(n.links.carrier.isin(carrier))& (n.links.bus1.str[:2]==ct)] + # assets which are going to be phased out before end of their lifetime + assets_i = df[df[["build_year", "lifetime"]].sum(axis=1) > phase_out_year].index + build_year = n.links.loc[assets_i, "build_year"] + # adjust lifetime + n.links.loc[assets_i, "lifetime"] = (phase_out_year - build_year).astype(float) + +def set_all_phase_outs(n): + # TODO move this to a csv or to the config + planned= [(["nuclear"], "DE", 2022), + (["nuclear"], "BE", 2025), + (["nuclear"], "ES", 2027), + (["coal", "lignite"], "DE", 2038), + (["coal", "lignite"], "ES", 2027), + (["coal", "lignite"], "FR", 2022), + (["coal", "lignite"], "GB", 2024), + (["coal", "lignite"], "IT", 2025)] + for carrier, ct, phase_out_year in planned: + set_phase_out(n, carrier,ct, phase_out_year) + # remove assets which are already phased out + remove_i = n.links[n.links[["build_year", "lifetime"]].sum(axis=1) 0 and options["bev_dsm"]: @@ -1353,6 +1357,7 @@ def add_land_transport(n, costs): e_cyclic=True, e_nom=e_nom, e_max_pu=1, + lifetime=1, e_min_pu=dsm_profile[nodes] ) @@ -1861,7 +1866,8 @@ def add_biomass(n, costs): carrier="biogas", e_nom=biogas_potentials_spatial, marginal_cost=costs.at['biogas', 'fuel'], - e_initial=biogas_potentials_spatial + e_initial=biogas_potentials_spatial, + lifetime=1, ) n.madd("Store", @@ -1870,7 +1876,8 @@ def add_biomass(n, costs): carrier="solid biomass", e_nom=solid_biomass_potentials_spatial, marginal_cost=costs.at['solid biomass', 'fuel'], - e_initial=solid_biomass_potentials_spatial + e_initial=solid_biomass_potentials_spatial, + lifetime=1, ) n.madd("Link", @@ -2779,9 +2786,9 @@ def set_temporal_aggregation(n, opts, solver_name): 'prepare_sector_network', simpl='', opts="", - clusters="37", - lv=1.5, - sector_opts='cb40ex0-365H-T-H-B-I-A-solar+p3-dist1', + clusters="45", + lv=1.0, + sector_opts='365H-T-H-B-I-A-solar+p3-dist1', planning_horizons="2020", ) @@ -2813,7 +2820,7 @@ def set_temporal_aggregation(n, opts, solver_name): spatial = define_spatial(pop_layout.index, options) - if snakemake.config["foresight"] == 'myopic': + if snakemake.config["foresight"] in ['myopic', 'perfect']: add_lifetime_wind_solar(n, costs) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 93442a41..b8892291 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -4,15 +4,23 @@ import numpy as np import pandas as pd +import 
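To see what the objective weightings computed in `prepare_perfect_foresight.py` look like, a worked example with assumed planning horizons [2020, 2030, 2040, 2050] and the default `social_discountrate` of 0.02; each period lasts until the next begins (the last period reuses the previous duration), and its objective weight is the discounted sum over the years it spans:

```python
import pandas as pd

def get_social_discount(t, r=0.01):
    return 1 / (1 + r) ** t

def get_investment_weighting(time_weighting, r=0.01):
    end = time_weighting.cumsum()
    start = end.shift().fillna(0)
    return pd.concat([start, end], axis=1).apply(
        lambda x: sum(get_social_discount(t, r) for t in range(int(x[0]), int(x[1]))),
        axis=1,
    )

years = pd.Index([2020, 2030, 2040, 2050])
time_w = years.to_series().diff().shift(-1).ffill()  # 10 years per period
print(get_investment_weighting(time_w, r=0.02).round(2))
# 2020    9.16   (discounted years 0-9)
# 2030    7.52   (years 10-19)
# 2040    6.17   (years 20-29)
# 2050    5.06   (years 30-39)
```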
diff --git a/scripts/solve_network.py b/scripts/solve_network.py
index 93442a41..b8892291 100644
--- a/scripts/solve_network.py
+++ b/scripts/solve_network.py
@@ -4,15 +4,23 @@
 
 import numpy as np
 import pandas as pd
+import math
 
 from pypsa.linopt import get_var, linexpr, define_constraints
 from pypsa.linopf import network_lopf, ilopf
+from pypsa.descriptors import get_active_assets, expand_series
+
 from vresutils.benchmark import memory_logger
 from helper import override_component_attrs, update_config_with_sector_opts
 
+from packaging.version import Version, parse
+agg_group_kwargs = (
+    dict(numeric_only=False) if parse(pd.__version__) >= Version("1.3") else {}
+)
+
 import logging
 logger = logging.getLogger(__name__)
 pypsa.pf.logger.setLevel(logging.WARNING)
@@ -20,11 +28,56 @@
 
 def add_land_use_constraint(n):
 
+    if (snakemake.config["foresight"] == "perfect") and ('m' in snakemake.wildcards.clusters):
+        raise NotImplementedError(
+            "The cluster method 'm' is not implemented for perfect foresight."
+        )
+
     if 'm' in snakemake.wildcards.clusters:
         _add_land_use_constraint_m(n)
     else:
         _add_land_use_constraint(n)
 
+
+def add_land_use_constraint_perfect(n):
+    c, attr = "Generator", "p_nom"
+    investments = n.snapshots.levels[0]
+    df = n.df(c).copy()
+    res = df[df.p_nom_max != np.inf].carrier.unique()
+    # extendable assets
+    ext_i = n.df(c)[(n.df(c)["carrier"].isin(res)) & (n.df(c)["p_nom_extendable"])].index
+    # dataframe indicating when assets are active
+    active_i = pd.concat([get_active_assets(n, c, inv_p).rename(inv_p)
+                          for inv_p in investments], axis=1).astype(int)
+    # extendable and active assets
+    ext_and_active = active_i.T[active_i.index.intersection(ext_i)]
+    if ext_and_active.empty:
+        return
+    p_nom = get_var(n, c, attr)[ext_and_active.columns]
+    # sum over each bus and carrier should be smaller than p_nom_max
+    lhs = (
+        linexpr((ext_and_active, p_nom))
+        .T.groupby([n.df(c).carrier, n.df(c).bus])
+        .sum(**agg_group_kwargs)
+        .T
+    )
+    # maximum technical potential
+    p_nom_max_w = n.df(c).p_nom_max.loc[ext_and_active.columns]
+    p_nom_max_t = expand_series(p_nom_max_w, investments).T
+
+    rhs = (
+        p_nom_max_t.mul(ext_and_active)
+        .groupby([n.df(c).carrier, n.df(c).bus], axis=1)
+        .max(**agg_group_kwargs)
+    ).reindex(columns=lhs.columns)
+
+    # TODO this currently leads to infeasibilities in ES and DE
+    # rename existing offwind to reduce technical potential
+    # df.carrier.replace({"offwind": "offwind-ac"}, inplace=True)
+    # existing_p_nom = df.groupby([df.carrier, df.bus]).sum().p_nom.loc[rhs.columns]
+    # rhs -= existing_p_nom
+    # make sure that rhs is not negative because existing capacities > tech potential
+    # rhs.clip(lower=0, inplace=True)
+
+    define_constraints(n, lhs, "<=", rhs, "GlobalConstraint", "land_use_constraint")
+
+
 def _add_land_use_constraint(n):
     #warning: this will miss existing offwind which is not classed AC-DC and has carrier 'offwind'
 
@@ -100,7 +153,7 @@ def prepare_network(n, solve_opts=None):
         n.set_snapshots(n.snapshots[:nhours])
         n.snapshot_weightings[:] = 8760./nhours
 
-    if snakemake.config['foresight'] == 'myopic':
+    if snakemake.config['foresight'] == "myopic":
         add_land_use_constraint(n)
 
     return n
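A toy illustration (entirely made-up data) of the activity matrix at the heart of `add_land_use_constraint_perfect`: rows are investment periods, columns are extendable generators, and the per-(carrier, bus) sum of active capacity must stay below `p_nom_max` in every period. In the real constraint the 0/1 entries multiply the `p_nom` variables via `linexpr`; plain numbers stand in for them here:

```python
import pandas as pd

# Two solar vintages at the same bus; 1 = active in that period.
active = pd.DataFrame(
    {"solar-2020": [1, 1, 0], "solar-2030": [0, 1, 1]},
    index=[2020, 2030, 2040],
)
carrier = pd.Series("solar", index=active.columns)
bus = pd.Series("DE0", index=active.columns)

lhs_shape = active.T.groupby([carrier, bus]).sum().T
print(lhs_shape)
#      solar
#        DE0
# 2020      1
# 2030      2
# 2040      1
```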
@@ -214,15 +267,20 @@ def add_pipe_retrofit_constraint(n):
 
 
 def add_co2_sequestration_limit(n, sns):
-
+    # TODO isn't it better to have the limit on e_nom instead of on e at the last snapshot?
     co2_stores = n.stores.loc[n.stores.carrier=='co2 stored'].index
 
     if co2_stores.empty or ('Store', 'e') not in n.variables.index:
         return
 
+    if snakemake.config["foresight"] == "perfect":
+        last_sn = (n.snapshot_weightings.loc[sns].reset_index(level=1, drop=False)
+                   .groupby(level=0).last().reset_index()
+                   .set_index(["period", "timestep"]).index)
+    else:
+        last_sn = sns[-1:]
+    vars_final_co2_stored = get_var(n, 'Store', 'e').loc[last_sn, co2_stores]
 
-    vars_final_co2_stored = get_var(n, 'Store', 'e').loc[sns[-1], co2_stores]
-
-    lhs = linexpr((1, vars_final_co2_stored)).sum()
+    lhs = linexpr((1, vars_final_co2_stored)).sum(axis=1)
 
     limit = n.config["sector"].get("co2_sequestration_potential", 200) * 1e6
     for o in opts:
@@ -240,20 +298,56 @@ def add_co2_sequestration_limit(n, sns):
         'mu', axes=pd.Index([name]), spec=name)
 
 
+def add_carbon_constraint(n, snapshots):
+    glcs = n.global_constraints.query('type == "Co2constraint"')
+    if glcs.empty:
+        return
+    for name, glc in glcs.iterrows():
+        rhs = glc.constant
+        sense = glc.sense
+        carattr = glc.carrier_attribute
+        emissions = n.carriers.query(f"{carattr} != 0")[carattr]
+        if emissions.empty:
+            continue
+
+        # stores
+        n.stores["carrier"] = n.stores.bus.map(n.buses.carrier)
+        stores = n.stores.query("carrier in @emissions.index and not e_cyclic")
+        if not stores.empty:
+            time_valid = (int(glc.loc["investment_period"])
+                          if not math.isnan(glc.loc["investment_period"])
+                          else n.snapshots.levels[0])
+            time_weightings = n.investment_period_weightings.loc[time_valid, "years"]
+            if isinstance(time_weightings, pd.Series):
+                time_weightings = expand_series(time_weightings, stores.index)
+            final_e = get_var(n, "Store", "e").groupby(level=0).last().loc[time_valid, stores.index]
+
+            lhs = linexpr((time_weightings, final_e))
+            define_constraints(n, lhs, sense, rhs, "GlobalConstraint", "mu",
+                               axes=pd.Index([name]), spec=name)
+
+
 def extra_functionality(n, snapshots):
     add_battery_constraints(n)
     add_pipe_retrofit_constraint(n)
     add_co2_sequestration_limit(n, snapshots)
+    if snakemake.config['foresight'] == "perfect":
+        add_land_use_constraint_perfect(n)
+        add_carbon_constraint(n, snapshots)
+
+
 
 def solve_network(n, config, opts='', **kwargs):
     solver_options = config['solving']['solver'].copy()
     solver_name = solver_options.pop('name')
     cf_solving = config['solving']['options']
+    if snakemake.config["foresight"] == "perfect":
+        cf_solving["multi_investment_periods"] = True
     track_iterations = cf_solving.get('track_iterations', False)
     min_iterations = cf_solving.get('min_iterations', 4)
     max_iterations = cf_solving.get('max_iterations', 6)
     keep_shadowprices = cf_solving.get('keep_shadowprices', True)
+    multi_investment = cf_solving.get("multi_investment_periods", False)
 
     # add to network for extra_functionality
     n.config = config
@@ -262,7 +356,8 @@ def solve_network(n, config, opts='', **kwargs):
     if cf_solving.get('skip_iterations', False):
         network_lopf(n, solver_name=solver_name, solver_options=solver_options,
                      extra_functionality=extra_functionality,
-                     keep_shadowprices=keep_shadowprices, **kwargs)
+                     keep_shadowprices=keep_shadowprices,
+                     multi_investment_periods=multi_investment, **kwargs)
     else:
         ilopf(n, solver_name=solver_name, solver_options=solver_options,
               track_iterations=track_iterations,
@@ -270,21 +365,22 @@ def solve_network(n, config, opts='', **kwargs):
               max_iterations=max_iterations,
               extra_functionality=extra_functionality,
               keep_shadowprices=keep_shadowprices,
+              multi_investment_periods=multi_investment,
               **kwargs)
 
     return n
 
-
+#%%
 if __name__ == "__main__":
     if 'snakemake' not in globals():
         from helper import mock_snakemake
         snakemake = mock_snakemake(
-            'solve_network',
+            'solve_network_myopic',
             simpl='',
             opts="",
-            clusters="37",
+            clusters="45",
             lv=1.0,
-            sector_opts='168H-T-H-B-I-A-solar+p3-dist1',
-            planning_horizons="2030",
+            planning_horizons="2020",
+            sector_opts='Co2L0-365H-T-H-B-I-A-solar+p3-dist1',
         )
 
     logging.basicConfig(filename=snakemake.log.python,
@@ -316,6 +412,11 @@ def solve_network(n, config, opts='', **kwargs):
         n.line_volume_limit_dual = n.global_constraints.at["lv_limit", "mu"]
 
     n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
+
+    # rename columns since MultiIndex columns are not supported by pypsa.io
+    for key in n.global_constraints_t.keys():
+        n.global_constraints_t[key].columns = ['{} {}'.format(i, j) for i, j in n.global_constraints_t[key].columns]
+
     n.export_to_netcdf(snakemake.output[0])
 
     logger.info("Maximum memory usage: {}".format(mem.mem_usage))
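The column renaming added before `export_to_netcdf` works around the limitation noted in the patch: pypsa's netCDF I/O cannot serialise MultiIndex columns, which appear on `global_constraints_t` once constraints carry an investment period. A sketch of the flattening:

```python
import pandas as pd

cols = pd.MultiIndex.from_tuples([("CO2Limit", 2020), ("CO2Limit", 2030)])
df = pd.DataFrame([[1.0, 2.0]], columns=cols)

df.columns = ['{} {}'.format(i, j) for i, j in df.columns]
print(df.columns.tolist())
# ['CO2Limit 2020', 'CO2Limit 2030']
```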