From 6d65c5ac750264bb598aa00b2965c3c59dfd435c Mon Sep 17 00:00:00 2001 From: lisazeyen Date: Mon, 22 Aug 2022 10:49:33 +0200 Subject: [PATCH 1/8] add perfect foresight --- Snakefile | 220 +++++++++++++++++++-------- scripts/prepare_perfect_foresight.py | 195 ++++++++++++++++++++++++ scripts/prepare_sector_network.py | 23 ++- scripts/solve_network.py | 92 +++++++++-- 4 files changed, 444 insertions(+), 86 deletions(-) create mode 100644 scripts/prepare_perfect_foresight.py diff --git a/Snakefile b/Snakefile index 6db0bca0..4f727bad 100644 --- a/Snakefile +++ b/Snakefile @@ -29,14 +29,15 @@ subworkflow pypsaeur: snakefile: "../pypsa-eur/Snakefile" configfile: "../pypsa-eur/config.yaml" -rule all: - input: SDIR + '/graphs/costs.pdf' +if config["foresight"] in ["myopic", "overnight"]: + rule all: + input: SDIR + '/graphs/costs.pdf' -rule solve_all_networks: - input: - expand(RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc", - **config['scenario']) + rule solve_all_networks: + input: + expand(RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc", + **config['scenario']) rule prepare_sector_networks: @@ -511,17 +512,18 @@ rule prepare_sector_network: script: "scripts/prepare_sector_network.py" -rule plot_network: - input: - overrides="data/override_component_attrs", - network=RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc" - output: - map=RDIR + "/maps/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf", - today=RDIR + "/maps/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}-today.pdf" - threads: 2 - resources: mem_mb=10000 - benchmark: RDIR + "/benchmarks/plot_network/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}" - script: "scripts/plot_network.py" +if config["foresight"] in ["myopic", "overnight"]: + rule plot_network: + input: + overrides="data/override_component_attrs", + network=RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc" + output: + map=RDIR + "/maps/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf", + today=RDIR + "/maps/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}-today.pdf" + threads: 2 + resources: mem_mb=10000 + benchmark: RDIR + "/benchmarks/plot_network/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}" + script: "scripts/plot_network.py" rule copy_config: @@ -531,56 +533,56 @@ rule copy_config: benchmark: SDIR + "/benchmarks/copy_config" script: "scripts/copy_config.py" - -rule make_summary: - input: - overrides="data/override_component_attrs", - networks=expand( - RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc", - **config['scenario'] - ), - costs=CDIR + "costs_{}.csv".format(config['scenario']['planning_horizons'][0]), - plots=expand( - RDIR + "/maps/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf", - **config['scenario'] - ) - output: - nodal_costs=SDIR + '/csvs/nodal_costs.csv', - nodal_capacities=SDIR + '/csvs/nodal_capacities.csv', - nodal_cfs=SDIR + '/csvs/nodal_cfs.csv', - cfs=SDIR + '/csvs/cfs.csv', - costs=SDIR + '/csvs/costs.csv', - capacities=SDIR + '/csvs/capacities.csv', - curtailment=SDIR + '/csvs/curtailment.csv', - energy=SDIR + '/csvs/energy.csv', - supply=SDIR + '/csvs/supply.csv', - supply_energy=SDIR + 
'/csvs/supply_energy.csv', - prices=SDIR + '/csvs/prices.csv', - weighted_prices=SDIR + '/csvs/weighted_prices.csv', - market_values=SDIR + '/csvs/market_values.csv', - price_statistics=SDIR + '/csvs/price_statistics.csv', - metrics=SDIR + '/csvs/metrics.csv' - threads: 2 - resources: mem_mb=10000 - benchmark: SDIR + "/benchmarks/make_summary" - script: "scripts/make_summary.py" +if config["foresight"] in ["myopic", "overnight"]: + rule make_summary: + input: + overrides="data/override_component_attrs", + networks=expand( + RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc", + **config['scenario'] + ), + costs=CDIR + "costs_{}.csv".format(config['scenario']['planning_horizons'][0]), + plots=expand( + RDIR + "/maps/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf", + **config['scenario'] + ) + output: + nodal_costs=SDIR + '/csvs/nodal_costs.csv', + nodal_capacities=SDIR + '/csvs/nodal_capacities.csv', + nodal_cfs=SDIR + '/csvs/nodal_cfs.csv', + cfs=SDIR + '/csvs/cfs.csv', + costs=SDIR + '/csvs/costs.csv', + capacities=SDIR + '/csvs/capacities.csv', + curtailment=SDIR + '/csvs/curtailment.csv', + energy=SDIR + '/csvs/energy.csv', + supply=SDIR + '/csvs/supply.csv', + supply_energy=SDIR + '/csvs/supply_energy.csv', + prices=SDIR + '/csvs/prices.csv', + weighted_prices=SDIR + '/csvs/weighted_prices.csv', + market_values=SDIR + '/csvs/market_values.csv', + price_statistics=SDIR + '/csvs/price_statistics.csv', + metrics=SDIR + '/csvs/metrics.csv' + threads: 2 + resources: mem_mb=10000 + benchmark: SDIR + "/benchmarks/make_summary" + script: "scripts/make_summary.py" -rule plot_summary: - input: - costs=SDIR + '/csvs/costs.csv', - energy=SDIR + '/csvs/energy.csv', - balances=SDIR + '/csvs/supply_energy.csv', - eurostat=input_eurostat, - country_codes='data/Country_codes.csv', - output: - costs=SDIR + '/graphs/costs.pdf', - energy=SDIR + '/graphs/energy.pdf', - balances=SDIR + '/graphs/balances-energy.pdf' - threads: 2 - resources: mem_mb=10000 - benchmark: SDIR + "/benchmarks/plot_summary" - script: "scripts/plot_summary.py" + rule plot_summary: + input: + costs=SDIR + '/csvs/costs.csv', + energy=SDIR + '/csvs/energy.csv', + balances=SDIR + '/csvs/supply_energy.csv', + eurostat=input_eurostat, + country_codes='data/Country_codes.csv', + output: + costs=SDIR + '/graphs/costs.pdf', + energy=SDIR + '/graphs/energy.pdf', + balances=SDIR + '/graphs/balances-energy.pdf' + threads: 2 + resources: mem_mb=10000 + benchmark: SDIR + "/benchmarks/plot_summary" + script: "scripts/plot_summary.py" if config["foresight"] == "overnight": @@ -603,7 +605,7 @@ if config["foresight"] == "overnight": script: "scripts/solve_network.py" -if config["foresight"] == "myopic": +if config["foresight"] in ["myopic","perfect"]: rule add_existing_baseyear: input: @@ -630,6 +632,9 @@ if config["foresight"] == "myopic": script: "scripts/add_existing_baseyear.py" +if config["foresight"] == "myopic": + + def solved_previous_horizon(wildcards): planning_horizons = config["scenario"]["planning_horizons"] i = planning_horizons.index(int(wildcards.planning_horizons)) @@ -654,7 +659,6 @@ if config["foresight"] == "myopic": ruleorder: add_existing_baseyear > add_brownfield - rule solve_network_myopic: input: overrides="data/override_component_attrs", @@ -671,3 +675,85 @@ if config["foresight"] == "myopic": resources: mem_mb=config['solving']['mem'] benchmark: RDIR + 
"/benchmarks/solve_network/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}" script: "scripts/solve_network.py" + +if config["foresight"] == "perfect": + rule prepare_perfect_foresight: + input: + network=expand(RDIR + "/prenetworks/" + "elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc", **config['scenario']), + brownfield_network = lambda w: (RDIR + "/prenetworks-brownfield/" + "elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_" + "{}.nc" + .format(str(config['scenario']["planning_horizons"][0]))), + overrides="data/override_component_attrs", + output: RDIR + "/prenetworks-brownfield/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years.nc" + threads: 2 + resources: mem_mb=10000 + script: "scripts/prepare_perfect_foresight.py" + + rule solve_network_perfect: + input: + overrides="data/override_component_attrs", + network= RDIR + "/prenetworks-brownfield/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years.nc", + costs=CDIR + "costs_2030.csv", + config=SDIR + '/configs/config.yaml' + output: RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years.nc" + shadow: "shallow" + log: + solver=RDIR + "/logs/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years_solver.log", + python=RDIR + "/logs/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years_python.log", + memory=RDIR + "/logs/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years_memory.log" + threads: 4 + resources: mem_mb=config['solving']['mem'] + benchmark: RDIR + "/benchmarks/solve_network/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years" + script: "scripts/solve_network.py" + + rule make_summary_perfect: + input: + networks=expand(RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years.nc", + **config['scenario']), + costs=CDIR + "costs_2020.csv", + overrides="data/override_component_attrs", + output: + nodal_costs="results" + '/' + config['run'] + '/csvs/nodal_costs.csv', + nodal_capacities="results" + '/' + config['run'] + '/csvs/nodal_capacities.csv', + nodal_cfs="results" + '/' + config['run'] + '/csvs/nodal_cfs.csv', + cfs="results" + '/' + config['run'] + '/csvs/cfs.csv', + costs="results" + '/' + config['run'] + '/csvs/costs.csv', + capacities="results" + '/' + config['run'] + '/csvs/capacities.csv', + curtailment="results" + '/' + config['run'] + '/csvs/curtailment.csv', + capital_cost="results" + '/' + config['run'] + '/csvs/capital_cost.csv', + energy="results" + '/' + config['run'] + '/csvs/energy.csv', + supply="results" + '/' + config['run'] + '/csvs/supply.csv', + supply_energy="results" + '/' + config['run'] + '/csvs/supply_energy.csv', + prices="results" + '/' + config['run'] + '/csvs/prices.csv', + weighted_prices="results" + '/' + config['run'] + '/csvs/weighted_prices.csv', + market_values="results" + '/' + config['run'] + '/csvs/market_values.csv', + price_statistics="results" + '/' + config['run'] + '/csvs/price_statistics.csv', + metrics="results" + '/' + config['run'] + '/csvs/metrics.csv', + co2_emissions="results" + '/' + config['run'] + '/csvs/co2_emissions.csv', + threads: 2 + resources: mem_mb=10000 + script: + 'scripts/make_summary_perfect.py' + + rule plot_summary_perfect: + input: + costs_csv="results" + '/' + config['run'] + '/csvs/costs.csv', + costs=CDIR, + energy="results" + '/' + config['run'] + '/csvs/energy.csv', + balances="results" + '/' + 
config['run'] + '/csvs/supply_energy.csv', + eea ="data/eea/UNFCCC_v24.csv", + countries="results" + '/' + config['run'] + '/csvs/nodal_capacities.csv', + co2_emissions="results" + '/' + config['run'] + '/csvs/co2_emissions.csv', + capacities="results" + '/' + config['run'] + '/csvs/capacities.csv', + capital_cost="results" + '/' + config['run'] + '/csvs/capital_cost.csv', + output: + costs1="results" + '/' + config['run'] + '/graphs/costs.pdf', + costs2="results" + '/' + config['run'] + '/graphs/costs2.pdf', + costs3="results" + '/' + config['run'] + '/graphs/total_costs_per_year.pdf', + # energy="results" + '/' + config['run'] + '/graphs/energy.pdf', + balances="results" + '/' + config['run'] + '/graphs/balances-energy.pdf', + co2_emissions="results" + '/' + config['run'] + '/graphs/carbon_budget_plot.pdf', + capacities="results" + '/' + config['run'] + '/graphs/capacities.pdf', + threads: 2 + resources: mem_mb=10000 + script: + 'scripts/plot_summary_perfect.py' diff --git a/scripts/prepare_perfect_foresight.py b/scripts/prepare_perfect_foresight.py new file mode 100644 index 00000000..00abbc64 --- /dev/null +++ b/scripts/prepare_perfect_foresight.py @@ -0,0 +1,195 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Concats pypsa networks of single investment periods to one network. + +Created on Tue Aug 16 10:40:41 2022 + +@author: lisa +""" +import pypsa +import pandas as pd +from helper import override_component_attrs, update_config_with_sector_opts +from pypsa.io import import_components_from_dataframe +from add_existing_baseyear import add_build_year_to_new_assets +from six import iterkeys +from pypsa.descriptors import expand_series +import logging +logger = logging.getLogger(__name__) + +# helper functions --------------------------------------------------- +def get_missing(df, n, c): + """Get in network n missing assets of df for component c. + + Input: + df: pandas DataFrame, static values of pypsa components + n : pypsa Network to which new assets should be added + c : string, pypsa component.list_name (e.g. "generators") + Return: + pd.DataFrame with static values of missing assets + """ + df_final = getattr(n, c) + missing_i = df.index.difference(df_final.index) + return df.loc[missing_i] + + +def get_social_discount(t, r=0.01): + """Calculate for a given time t the social discount.""" + return 1 / (1 + r) ** t + + +def get_investment_weighting(time_weighting, r=0.01): + """Define cost weighting. + + Returns cost weightings depending on the the time_weighting (pd.Series) + and the social discountrate r + """ + end = time_weighting.cumsum() + start = time_weighting.cumsum().shift().fillna(0) + return pd.concat([start, end], axis=1).apply( + lambda x: sum([get_social_discount(t, r) for t in range(int(x[0]), int(x[1]))]), + axis=1, + ) + +# -------------------------------------------------------------------- +def concat_networks(years): + """Concat given pypsa networks and adds build_year. 
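    Each single-year network is read in, its newly added assets are stamped
    with their build_year, and its static and time-varying data are merged
    into a single network whose snapshots form an (investment period,
    timestep) MultiIndex. Investment period weightings are then derived from
    the period lengths and the social discount rate.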
+ + Return: + n : pypsa.Network for the whole planning horizon + + """ + + # input paths of sector coupling networks + network_paths = [snakemake.input.brownfield_network] + snakemake.input.network[1:] + # final concatenated network + overrides = override_component_attrs(snakemake.input.overrides) + n = pypsa.Network( override_component_attrs=overrides) + + + # iterate over single year networks and concat to perfect foresight network + for i, network_path in enumerate(network_paths): + year = years[i] + network = pypsa.Network(network_path, override_component_attrs=overrides) + network.lines["carrier"] = "AC" + add_build_year_to_new_assets(network, year) + + # static ---------------------------------- + # (1) add buses and carriers + for component in network.iterate_components(["Bus", "Carrier"]): + df_year = component.df + # get missing assets + missing = get_missing(df_year, n, component.list_name) + import_components_from_dataframe(n, missing, component.name) + # (2) add generators, links, stores and loads + for component in network.iterate_components( + ["Generator", "Link", "Store", "Load", "Line", "StorageUnit"] + ): + + df_year = component.df.copy() + missing = get_missing(df_year, n, component.list_name) + + import_components_from_dataframe(n, missing, component.name) + + # time variant -------------------------------------------------- + network_sns = pd.MultiIndex.from_product([[year], network.snapshots]) + snapshots = n.snapshots.drop("now", errors="ignore").union(network_sns) + n.set_snapshots(snapshots) + + for component in network.iterate_components(): + pnl = getattr(n, component.list_name + "_t") + for k in iterkeys(component.pnl): + pnl_year = component.pnl[k].copy().reindex(snapshots, level=1) + if pnl_year.empty and ~(component.name=="Load" and k=="p_set"): continue + if component.name == "Load": + static_load = network.loads.loc[network.loads.p_set != 0] + static_load_t = expand_series( + static_load.p_set, network_sns + ).T + pnl_year = pd.concat([pnl_year.reindex(network_sns), + static_load_t], axis=1) + columns = (pnl[k].columns.union(pnl_year.columns)).unique() + pnl[k] = pnl[k].reindex(columns=columns) + pnl[k].loc[pnl_year.index, pnl_year.columns] = pnl_year + + else: + # this is to avoid adding multiple times assets with infinit lifetime as ror + cols = pnl_year.columns.difference(pnl[k].columns) + pnl[k] = pd.concat([pnl[k], pnl_year[cols]], axis=1) + + + n.snapshot_weightings.loc[year,:] = network.snapshot_weightings.values + # set investment periods + n.investment_periods = n.snapshots.levels[0] + # weighting of the investment period -> assuming last period same weighting as the period before + time_w = n.investment_periods.to_series().diff().shift(-1).fillna(method="ffill") + n.investment_period_weightings["years"] = time_w + # set objective weightings + objective_w = get_investment_weighting(n.investment_period_weightings["years"], + social_discountrate) + n.investment_period_weightings["objective"] = objective_w + # all former static loads are now time-dependent -> set static = 0 + n.loads["p_set"] = 0 + + return n + +def adjust_stores(n): + # cylclic constraint + cyclic_i = n.stores[n.stores.e_cyclic].index + n.stores.loc[cyclic_i, "e_cyclic_per_period"] = True + n.stores.loc[cyclic_i, "e_cyclic"] = False + # co2 store assumptions + co2_i = n.stores[n.stores.carrier.isin(["co2", "co2 stored"])].index + n.stores.loc[co2_i, "e_cyclic_per_period"] = False + + return n + +def set_phase_out(n, carrier, ct, phase_out_year): + df = 
n.links[(n.links.carrier.isin(carrier))& (n.links.bus1.str[:2]==ct)] + assets_i = df[df[["build_year", "lifetime"]].sum(axis=1) > phase_out_year].index + n.links.loc[assets_i, "lifetime"] = (phase_out_year - n.links.loc[assets_i, "build_year"]).astype(float) + +def set_all_phase_outs(n): + planned= [(["nuclear"], "DE", 2022), + (["nuclear"], "BE", 2025), + (["nuclear"], "ES", 2027), + (["coal", "lignite"], "DE", 2038), + (["coal", "lignite"], "ES", 2027), + (["coal", "lignite"], "FR", 2022), + (["coal", "lignite"], "GB", 2024), + (["coal", "lignite"], "IT", 2025)] + for carrier, ct, phase_out_year in planned: + set_phase_out(n, carrier,ct, phase_out_year) + # remove assets which are already phased out + remove_i = n.links[n.links[["build_year", "lifetime"]].sum(axis=1) 0 and options["bev_dsm"]: @@ -1269,6 +1273,7 @@ def add_land_transport(n, costs): e_cyclic=True, e_nom=e_nom, e_max_pu=1, + lifetime=1, e_min_pu=dsm_profile[nodes] ) @@ -1776,7 +1781,8 @@ def add_biomass(n, costs): carrier="biogas", e_nom=biogas_potentials_spatial, marginal_cost=costs.at['biogas', 'fuel'], - e_initial=biogas_potentials_spatial + e_initial=biogas_potentials_spatial, + lifetime=1, ) n.madd("Store", @@ -1785,7 +1791,8 @@ def add_biomass(n, costs): carrier="solid biomass", e_nom=solid_biomass_potentials_spatial, marginal_cost=costs.at['solid biomass', 'fuel'], - e_initial=solid_biomass_potentials_spatial + e_initial=solid_biomass_potentials_spatial, + lifetime=1, ) n.madd("Link", @@ -2425,9 +2432,9 @@ def limit_individual_line_extension(n, maxext): 'prepare_sector_network', simpl='', opts="", - clusters="37", - lv=1.5, - sector_opts='cb40ex0-365H-T-H-B-I-A-solar+p3-dist1', + clusters="45", + lv=1.0, + sector_opts='365H-T-H-B-I-A-solar+p3-dist1', planning_horizons="2020", ) @@ -2459,7 +2466,7 @@ def limit_individual_line_extension(n, maxext): spatial = define_spatial(pop_layout.index, options) - if snakemake.config["foresight"] == 'myopic': + if snakemake.config["foresight"] in ['myopic', 'perfect']: add_lifetime_wind_solar(n, costs) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 563d8c29..18916158 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -9,10 +9,17 @@ from pypsa.linopf import network_lopf, ilopf +from pypsa.descriptors import get_active_assets, expand_series + from vresutils.benchmark import memory_logger from helper import override_component_attrs, update_config_with_sector_opts +from packaging.version import Version, parse +agg_group_kwargs = ( + dict(numeric_only=False) if parse(pd.__version__) >= Version("1.3") else {} +) + import logging logger = logging.getLogger(__name__) pypsa.pf.logger.setLevel(logging.WARNING) @@ -20,11 +27,56 @@ def add_land_use_constraint(n): + if (snakemake.config["foresight"] == "perfect") and ('m' in snakemake.wildcards.clusters): + raise NotImplementedError( + "The clustermethod m is not implemented for perfect foresight" + ) + if 'm' in snakemake.wildcards.clusters: _add_land_use_constraint_m(n) else: _add_land_use_constraint(n) +def add_land_use_constraint_perfect(n): + c, attr = "Generator", "p_nom" + investments = n.snapshots.levels[0] + df = n.df(c).copy() + res = df[df.p_nom_max!=np.inf].carrier.unique() + # extendable assets + ext_i = n.df(c)[(n.df(c)["carrier"].isin(res)) & (n.df(c)["p_nom_extendable"])].index + # dataframe when assets are active + active_i = pd.concat([get_active_assets(n, c, inv_p).rename(inv_p) + for inv_p in investments],axis=1).astype(int) + # extendable and active assets + ext_and_active = 
active_i.T[active_i.index.intersection(ext_i)] + if ext_and_active.empty: + return + p_nom = get_var(n, c, attr)[ext_and_active.columns] + # sum over each bus and carrier should be smaller than p_nom_max + lhs = ( + linexpr((ext_and_active, p_nom)) + .T.groupby([n.df(c).carrier, n.df(c).bus]) + .sum(**agg_group_kwargs) + .T + ) + # maximum technical potential + p_nom_max_w = n.df(c).p_nom_max.loc[ext_and_active.columns] + p_nom_max_t = expand_series(p_nom_max_w, investments).T + + rhs = ( + p_nom_max_t.mul(ext_and_active) + .groupby([n.df(c).carrier, n.df(c).bus], axis=1) + .max(**agg_group_kwargs) + ).reindex(columns=lhs.columns) + + # TODO this currently leads to infeasibilities in ES and DE + # rename existing offwind to reduce technical potential + # df.carrier.replace({"offwind":"offwind-ac"},inplace=True) + #existing_p_nom = df.groupby([df.carrier, df.bus]).sum().p_nom.loc[rhs.columns] + #rhs -= existing_p_nom + # make sure that rhs is not negative because existing capacities > tech potential + #rhs.clip(lower=0, inplace=True) + define_constraints(n, lhs, "<=", rhs, "GlobalConstraint", "land_use_constraint") def _add_land_use_constraint(n): #warning: this will miss existing offwind which is not classed AC-DC and has carrier 'offwind' @@ -100,7 +152,7 @@ def prepare_network(n, solve_opts=None): n.set_snapshots(n.snapshots[:nhours]) n.snapshot_weightings[:] = 8760./nhours - if snakemake.config['foresight'] == 'myopic': + if snakemake.config['foresight']=="myopic": add_land_use_constraint(n) return n @@ -214,15 +266,20 @@ def add_pipe_retrofit_constraint(n): def add_co2_sequestration_limit(n, sns): - + # TODO isn't it better to have the limit on the e_nom instead of last sn e? co2_stores = n.stores.loc[n.stores.carrier=='co2 stored'].index if co2_stores.empty or ('Store', 'e') not in n.variables.index: return + if snakemake.config["foresight"]: + last_sn = (n.snapshot_weightings.loc[sns].reset_index(level=1, drop=False) + .groupby(level=0).last().reset_index() + .set_index(["period", "timestep"]).index) + else: + last_sn = sns[-1] + vars_final_co2_stored = get_var(n, 'Store', 'e').loc[last_sn, co2_stores] - vars_final_co2_stored = get_var(n, 'Store', 'e').loc[sns[-1], co2_stores] - - lhs = linexpr((1, vars_final_co2_stored)).sum() + lhs = linexpr((1, vars_final_co2_stored)).sum(axis=1) limit = n.config["sector"].get("co2_sequestration_potential", 200) * 1e6 for o in opts: @@ -245,15 +302,22 @@ def extra_functionality(n, snapshots): add_pipe_retrofit_constraint(n) add_co2_sequestration_limit(n, snapshots) + if snakemake.config['foresight']=="perfect": + add_land_use_constraint_perfect(n) + + def solve_network(n, config, opts='', **kwargs): solver_options = config['solving']['solver'].copy() solver_name = solver_options.pop('name') cf_solving = config['solving']['options'] + if snakemake.config["foresight"] == "perfect": + cf_solving["multi_investment_periods"] = True track_iterations = cf_solving.get('track_iterations', False) min_iterations = cf_solving.get('min_iterations', 4) max_iterations = cf_solving.get('max_iterations', 6) keep_shadowprices = cf_solving.get('keep_shadowprices', True) + multi_investment = cf_solving.get("multi_investment_periods", False) # add to network for extra_functionality n.config = config @@ -262,7 +326,8 @@ def solve_network(n, config, opts='', **kwargs): if cf_solving.get('skip_iterations', False): network_lopf(n, solver_name=solver_name, solver_options=solver_options, extra_functionality=extra_functionality, - keep_shadowprices=keep_shadowprices, 
**kwargs) + keep_shadowprices=keep_shadowprices, + multi_investment_periods=multi_investment, **kwargs) else: ilopf(n, solver_name=solver_name, solver_options=solver_options, track_iterations=track_iterations, @@ -270,21 +335,21 @@ def solve_network(n, config, opts='', **kwargs): max_iterations=max_iterations, extra_functionality=extra_functionality, keep_shadowprices=keep_shadowprices, + multi_investment_periods=multi_investment, **kwargs) return n - +#%% if __name__ == "__main__": if 'snakemake' not in globals(): from helper import mock_snakemake snakemake = mock_snakemake( - 'solve_network', + 'solve_network_perfect', simpl='', opts="", - clusters="37", + clusters="45", lv=1.0, - sector_opts='168H-T-H-B-I-A-solar+p3-dist1', - planning_horizons="2030", + sector_opts='365H-T-H-B-I-A-solar+p3-dist1', ) logging.basicConfig(filename=snakemake.log.python, @@ -316,6 +381,11 @@ def solve_network(n, config, opts='', **kwargs): n.line_volume_limit_dual = n.global_constraints.at["lv_limit", "mu"] n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) + + # rename columns since multi columns not working with pypsa.io + for key in n.global_constraints_t.keys(): + n.global_constraints_t[key].columns = ['{} {}'.format(i, j) for i, j in n.global_constraints_t[key].columns] + n.export_to_netcdf(snakemake.output[0]) logger.info("Maximum memory usage: {}".format(mem.mem_usage)) From 343f61b7626d05b2529ee7f167dcbb1f5a2358b4 Mon Sep 17 00:00:00 2001 From: lisazeyen Date: Mon, 22 Aug 2022 12:17:30 +0200 Subject: [PATCH 2/8] add carbon constraint --- scripts/prepare_perfect_foresight.py | 61 +++++++++++++++++++++++++++- scripts/solve_network.py | 32 ++++++++++++++- 2 files changed, 91 insertions(+), 2 deletions(-) diff --git a/scripts/prepare_perfect_foresight.py b/scripts/prepare_perfect_foresight.py index 00abbc64..df464db4 100644 --- a/scripts/prepare_perfect_foresight.py +++ b/scripts/prepare_perfect_foresight.py @@ -14,6 +14,7 @@ from add_existing_baseyear import add_build_year_to_new_assets from six import iterkeys from pypsa.descriptors import expand_series +import re import logging logger = logging.getLogger(__name__) @@ -163,6 +164,60 @@ def set_all_phase_outs(n): # remove assets which are already phased out remove_i = n.links[n.links[["build_year", "lifetime"]].sum(axis=1) Date: Mon, 22 Aug 2022 13:16:42 +0200 Subject: [PATCH 3/8] update config.yaml for perfect foresight --- config.default.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/config.default.yaml b/config.default.yaml index 5f7cbb33..9ebe7083 100644 --- a/config.default.yaml +++ b/config.default.yaml @@ -60,6 +60,11 @@ co2_budget: 2040: 0.0712365591 2045: 0.0322580645 2050: 0 + # update of IPCC 6th AR compared to the 1.5SR. 
(discussed here: https://twitter.com/JoeriRogelj/status/1424743828339167233) + 1p5 : 34.2 # 25.7 # Budget in Gt CO2 for 1.5 for Europe, global 420 Gt, assuming per capita share + 1p6 : 43.259666 # 35 # Budget in Gt CO2 for 1.6 for Europe, global 580 Gt + 1p7 : 51.4 # 45 # Budget in Gt CO2 for 1.7 for Europe, global 800 Gt + 2p0 : 69.778 # 73.9 # Budget in Gt CO2 for 2 for Europe, global 1170 Gt # snapshots are originally set in PyPSA-Eur/config.yaml but used again by PyPSA-Eur-Sec snapshots: From 18013b0130bef4607552b8f544198e5d955825c7 Mon Sep 17 00:00:00 2001 From: lisazeyen Date: Fri, 10 Feb 2023 09:06:46 +0100 Subject: [PATCH 4/8] add some comments --- Snakefile | 14 +++++++------- scripts/prepare_perfect_foresight.py | 14 +++++++++++--- scripts/solve_network.py | 9 +++++---- 3 files changed, 23 insertions(+), 14 deletions(-) diff --git a/Snakefile b/Snakefile index 6e4045f8..8f88f7a1 100644 --- a/Snakefile +++ b/Snakefile @@ -542,6 +542,13 @@ rule copy_config: benchmark: SDIR + "/benchmarks/copy_config" script: "scripts/copy_config.py" +rule copy_conda_env: + output: SDIR + '/configs/environment.yaml' + threads: 1 + resources: mem_mb=500 + benchmark: SDIR + "/benchmarks/copy_conda_env" + shell: "conda env export -f {output} --no-builds" + if config["foresight"] in ["myopic", "overnight"]: rule make_summary: input: @@ -576,13 +583,6 @@ if config["foresight"] in ["myopic", "overnight"]: benchmark: SDIR + "/benchmarks/make_summary" script: "scripts/make_summary.py" -rule copy_conda_env: - output: SDIR + '/configs/environment.yaml' - threads: 1 - resources: mem_mb=500 - benchmark: SDIR + "/benchmarks/copy_conda_env" - shell: "conda env export -f {output} --no-builds" - rule plot_summary: input: costs=SDIR + '/csvs/costs.csv', diff --git a/scripts/prepare_perfect_foresight.py b/scripts/prepare_perfect_foresight.py index df464db4..ac51f2a7 100644 --- a/scripts/prepare_perfect_foresight.py +++ b/scripts/prepare_perfect_foresight.py @@ -135,6 +135,8 @@ def concat_networks(years): return n def adjust_stores(n): + """Make sure that stores still behave cyclic over one year and not whole + modelling horizon.""" # cylclic constraint cyclic_i = n.stores[n.stores.e_cyclic].index n.stores.loc[cyclic_i, "e_cyclic_per_period"] = True @@ -146,11 +148,17 @@ def adjust_stores(n): return n def set_phase_out(n, carrier, ct, phase_out_year): + """Set planned phase outs for given carrier,country (ct) and planned year + of phase out (phase_out_year).""" df = n.links[(n.links.carrier.isin(carrier))& (n.links.bus1.str[:2]==ct)] + # assets which are going to be phased out before end of their lifetime assets_i = df[df[["build_year", "lifetime"]].sum(axis=1) > phase_out_year].index - n.links.loc[assets_i, "lifetime"] = (phase_out_year - n.links.loc[assets_i, "build_year"]).astype(float) + build_year = n.links.loc[assets_i, "build_year"] + # adjust lifetime + n.links.loc[assets_i, "lifetime"] = (phase_out_year - build_year).astype(float) def set_all_phase_outs(n): + # TODO move this to a csv or to the config planned= [(["nuclear"], "DE", 2022), (["nuclear"], "BE", 2025), (["nuclear"], "ES", 2027), @@ -172,7 +180,7 @@ def set_carbon_constraints(n, opts): snakemake.config["co2_budget"]["1p7"] * 1e9 ) # budget for + 1.7 Celsius for Europe for o in opts: - # temporal clustering + # other budgets m = re.match(r"^\d+p\d$", o, re.IGNORECASE) if m is not None: budget = snakemake.config["co2_budget"][m.group(0)] * 1e9 @@ -197,7 +205,7 @@ def set_carbon_constraints(n, opts): sense="<=", constant=0, ) - + # set minimum CO2 
emission constraint to avoid too fast reduction if "co2min" in opts: emissions_1990 = 4.53693 emissions_2019 = 3.344096 diff --git a/scripts/solve_network.py b/scripts/solve_network.py index dd9b4305..b8892291 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -272,12 +272,12 @@ def add_co2_sequestration_limit(n, sns): if co2_stores.empty or ('Store', 'e') not in n.variables.index: return - if snakemake.config["foresight"]: + if snakemake.config["foresight"]=="perfect": last_sn = (n.snapshot_weightings.loc[sns].reset_index(level=1, drop=False) .groupby(level=0).last().reset_index() .set_index(["period", "timestep"]).index) else: - last_sn = sns[-1] + last_sn = sns[-1:] vars_final_co2_stored = get_var(n, 'Store', 'e').loc[last_sn, co2_stores] lhs = linexpr((1, vars_final_co2_stored)).sum(axis=1) @@ -374,12 +374,13 @@ def solve_network(n, config, opts='', **kwargs): if 'snakemake' not in globals(): from helper import mock_snakemake snakemake = mock_snakemake( - 'solve_network_perfect', + 'solve_network_myopic', simpl='', opts="", clusters="45", lv=1.0, - sector_opts='365H-T-H-B-I-A-solar+p3-dist1-co2min', + planning_horizons="2020", + sector_opts='Co2L0-365H-T-H-B-I-A-solar+p3-dist1', ) logging.basicConfig(filename=snakemake.log.python, From 350a7767c27cb5f306973c8b989eca9e356e0577 Mon Sep 17 00:00:00 2001 From: lisazeyen Date: Tue, 14 Feb 2023 11:13:11 +0100 Subject: [PATCH 5/8] add social discount rate --- config.default.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/config.default.yaml b/config.default.yaml index 72094c69..eaed47d4 100644 --- a/config.default.yaml +++ b/config.default.yaml @@ -350,6 +350,7 @@ costs: discountrate: 0.07 # [EUR/USD] ECB: https://www.ecb.europa.eu/stats/exchange/eurofxref/html/eurofxref-graph-usd.en.html # noqa: E501 USD2013_to_EUR2013: 0.7532 + social_discountrate: 0.02 # Marginal and capital costs can be overwritten # capital_cost: From 8101c033bed2e6c995a0e85839e22e0804cfbcc9 Mon Sep 17 00:00:00 2001 From: lisazeyen Date: Tue, 14 Feb 2023 11:13:48 +0100 Subject: [PATCH 6/8] adjust network paths --- scripts/prepare_perfect_foresight.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/prepare_perfect_foresight.py b/scripts/prepare_perfect_foresight.py index ac51f2a7..64c0d36b 100644 --- a/scripts/prepare_perfect_foresight.py +++ b/scripts/prepare_perfect_foresight.py @@ -62,10 +62,11 @@ def concat_networks(years): """ # input paths of sector coupling networks - network_paths = [snakemake.input.brownfield_network] + snakemake.input.network[1:] + network_paths = [snakemake.input.brownfield_network] + [ + snakemake.input[f"network_{year}"] for year in years[1:]] # final concatenated network overrides = override_component_attrs(snakemake.input.overrides) - n = pypsa.Network( override_component_attrs=overrides) + n = pypsa.Network(override_component_attrs=overrides) # iterate over single year networks and concat to perfect foresight network @@ -236,7 +237,7 @@ def set_carbon_constraints(n, opts): opts="", clusters="45", lv=1.0, - sector_opts='365H-T-H-B-I-A-solar+p3-dist1-co2min', + sector_opts='1p7-365H-T-H-B-I-A-solar+p3-dist1', ) update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts) From d142994cb0319962d14609b27d66e0b03abdcd66 Mon Sep 17 00:00:00 2001 From: lisazeyen Date: Tue, 14 Feb 2023 11:14:34 +0100 Subject: [PATCH 7/8] adjust lifetime to new technology data --- scripts/prepare_sector_network.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index c4ce24de..a1446809 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1008,7 +1008,7 @@ def add_storage_and_grids(n, costs): e_cyclic=True, carrier="H2 Store", capital_cost=h2_capital_cost, - lifetime=costs.at["hydrogen storage tank incl. compressor", "lifetime"], + lifetime=costs.at["hydrogen storage tank type 1 including compressor", "lifetime"], ) if options["gas_network"] or options["H2_retrofit"]: @@ -1280,7 +1280,7 @@ def add_land_transport(n, costs): fuel_cell_share = get(options["land_transport_fuel_cell_share"], investment_year) electric_share = get(options["land_transport_electric_share"], investment_year) ice_share = get(options["land_transport_ice_share"], investment_year) - + total_share = fuel_cell_share + electric_share + ice_share if total_share != 1: logger.warning(f"Total land transport shares sum up to {total_share*100}%, corresponding to increased or decreased demand assumptions.") From fe3f1617e9310a0ff586b663759b1ad84f45f54a Mon Sep 17 00:00:00 2001 From: lisazeyen Date: Tue, 14 Feb 2023 11:14:55 +0100 Subject: [PATCH 8/8] start to modify make summary script --- Snakefile | 27 +++++++++++++++--------- scripts/make_summary.py | 46 ++++++++++++++++++++--------------------- 2 files changed, 39 insertions(+), 34 deletions(-) diff --git a/Snakefile b/Snakefile index db94d404..47856717 100644 --- a/Snakefile +++ b/Snakefile @@ -21,7 +21,7 @@ wildcard_constraints: SDIR = config['summary_dir'] + '/' + config['run'] RDIR = config['results_dir'] + config['run'] -CDIR = config['costs_dir'] + subworkflow pypsaeur: @@ -493,7 +493,7 @@ rule prepare_sector_network: co2="data/eea/UNFCCC_v23.csv", biomass_potentials='resources/biomass_potentials_s{simpl}_{clusters}.csv', heat_profile="data/heat_load_profile_BDEW.csv", - costs=CDIR + "costs_{}.csv".format(config['costs']['year']) if config["foresight"] == "overnight" else CDIR + "costs_{planning_horizons}.csv", + costs= "data/costs_{}.csv".format(config['costs']['year']) if config["foresight"] == "overnight" else "data/costs_{planning_horizons}.csv", profile_offwind_ac=pypsaeur("resources/profile_offwind-ac.nc"), profile_offwind_dc=pypsaeur("resources/profile_offwind-dc.nc"), h2_cavern="resources/salt_cavern_potentials_s{simpl}_{clusters}.csv", @@ -557,7 +557,7 @@ rule copy_conda_env: resources: mem_mb=500 benchmark: SDIR + "/benchmarks/copy_conda_env" shell: "conda env export -f {output} --no-builds" - + if config["foresight"] in ["myopic", "overnight"]: rule make_summary: input: @@ -566,7 +566,7 @@ if config["foresight"] in ["myopic", "overnight"]: RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc", **config['scenario'] ), - costs=CDIR + "costs_{}.csv".format(config['scenario']['planning_horizons'][0]), + costs= "data/costs_{}.csv".format(config['scenario']['planning_horizons'][0]), plots=expand( RDIR + "/maps/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}-costs-all_{planning_horizons}.pdf", **config['scenario'] @@ -704,7 +704,8 @@ if config["foresight"] == "myopic": if config["foresight"] == "perfect": rule prepare_perfect_foresight: input: - network=expand(RDIR + "/prenetworks/" + "elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc", **config['scenario']), + **{f"network_{year}": RDIR + "/prenetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_" + f"{year}.nc" + for year in config['scenario']["planning_horizons"][1:]}, 
brownfield_network = lambda w: (RDIR + "/prenetworks-brownfield/" + "elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_" + "{}.nc" .format(str(config['scenario']["planning_horizons"][0]))), overrides="data/override_component_attrs", @@ -717,7 +718,7 @@ if config["foresight"] == "perfect": input: overrides="data/override_component_attrs", network= RDIR + "/prenetworks-brownfield/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years.nc", - costs=CDIR + "costs_2030.csv", + costs="data/costs_2030.csv", config=SDIR + '/configs/config.yaml' output: RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years.nc" shadow: "shallow" @@ -732,9 +733,14 @@ if config["foresight"] == "perfect": rule make_summary_perfect: input: - networks=expand(RDIR + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years.nc", - **config['scenario']), - costs=CDIR + "costs_2020.csv", + **{f"networks_{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}": + RDIR + f"/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_brownfield_all_years.nc" + for simpl in config['scenario']['simpl'] + for clusters in config['scenario']['clusters'] + for opts in config['scenario']['opts'] + for sector_opts in config['scenario']['sector_opts'] + for lv in config['scenario']['lv']}, + costs="data/costs_2020.csv", overrides="data/override_component_attrs", output: nodal_costs="results" + '/' + config['run'] + '/csvs/nodal_costs.csv', @@ -762,7 +768,8 @@ if config["foresight"] == "perfect": rule plot_summary_perfect: input: costs_csv="results" + '/' + config['run'] + '/csvs/costs.csv', - costs=CDIR, + **{f"costs_{year}": f"data/costs_{year}.csv" + for year in config['scenario']["planning_horizons"]}, energy="results" + '/' + config['run'] + '/csvs/energy.csv', balances="results" + '/' + config['run'] + '/csvs/supply_energy.csv', eea ="data/eea/UNFCCC_v24.csv", diff --git a/scripts/make_summary.py b/scripts/make_summary.py index f9c74c89..48fa86e8 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -1,6 +1,5 @@ import sys -import yaml import pypsa import numpy as np @@ -409,8 +408,6 @@ def calculate_weighted_prices(n, label, weighted_prices): if carrier in ["H2", "gas"]: load = pd.DataFrame(index=n.snapshots, columns=buses, data=0.) 
- elif carrier[:5] == "space": - load = heat_demand_df[buses.str[:2]].rename(columns=lambda i: str(i)+suffix) else: load = n.loads_t.p_set[buses] @@ -526,15 +523,29 @@ def make_summaries(networks_dict): "metrics", ] - columns = pd.MultiIndex.from_tuples( - networks_dict.keys(), - names=["cluster", "lv", "opt", "planning_horizon"] - ) + network_paths = [key for key in snakemake.input.keys() + if "networks" in key] + + cols = pd.MultiIndex.from_tuples([key.split("_") + for key in network_paths]) + cols = cols.droplevel([0,1,4]) + + networks_dict = {cols[i] : snakemake.input[network_path] + for i, network_path in enumerate(network_paths)} + + print(networks_dict) + + # add planning horizons column + if snakemake.config["foresight"]=="perfect": + years = snakemake.config["scenario"]['planning_horizons'] + cols = pd.MultiIndex.from_product([cols.levels[0], cols.levels[1], + cols.levels[2], years]) + df = {} for output in outputs: - df[output] = pd.DataFrame(columns=columns, dtype=float) + df[output] = pd.DataFrame(columns=cols, dtype=float) for label, filename in networks_dict.items(): print(label, filename) @@ -555,24 +566,13 @@ def to_csv(df): for key in df: df[key].to_csv(snakemake.output[key]) - +#%% if __name__ == "__main__": if 'snakemake' not in globals(): from helper import mock_snakemake - snakemake = mock_snakemake('make_summary') - - networks_dict = { - (cluster, lv, opt+sector_opt, planning_horizon) : - snakemake.config['results_dir'] + snakemake.config['run'] + f'/postnetworks/elec_s{simpl}_{cluster}_lv{lv}_{opt}_{sector_opt}_{planning_horizon}.nc' \ - for simpl in snakemake.config['scenario']['simpl'] \ - for cluster in snakemake.config['scenario']['clusters'] \ - for opt in snakemake.config['scenario']['opts'] \ - for sector_opt in snakemake.config['scenario']['sector_opts'] \ - for lv in snakemake.config['scenario']['lv'] \ - for planning_horizon in snakemake.config['scenario']['planning_horizons'] - } + snakemake = mock_snakemake('make_summary_perfect') + - print(networks_dict) Nyears = 1 @@ -593,5 +593,3 @@ def to_csv(df): if snakemake.config["foresight"]=='myopic': cumulative_cost=calculate_cumulative_cost() cumulative_cost.to_csv(snakemake.config['summary_dir'] + '/' + snakemake.config['run'] + '/csvs/cumulative_cost.csv') - -
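The objective weighting that prepare_perfect_foresight.py assigns to each investment period is the sum of the yearly social discount factors 1/(1+r)^t over that period, with r taken from the new costs: social_discountrate option. A minimal, self-contained sketch of that calculation; the helpers mirror those in scripts/prepare_perfect_foresight.py, while the two 5-year periods starting in 2030 and 2035 and the 2% rate are illustrative assumptions, not values fixed by the patch:

import pandas as pd


def get_social_discount(t, r=0.01):
    """Discount factor for year t at social discount rate r."""
    return 1 / (1 + r) ** t


def get_investment_weighting(time_weighting, r=0.01):
    """Sum the discount factors of all years falling into each investment period."""
    end = time_weighting.cumsum()
    start = time_weighting.cumsum().shift().fillna(0)
    return pd.concat([start, end], axis=1).apply(
        lambda x: sum([get_social_discount(t, r) for t in range(int(x[0]), int(x[1]))]),
        axis=1,
    )


# two 5-year investment periods of equal length (illustrative)
years = pd.Series([5, 5], index=[2030, 2035])
print(get_investment_weighting(years, r=0.02).round(3))
# 2030    4.808
# 2035    4.355
# dtype: float64

These weightings scale the costs of each investment period in the multi-period objective, so costs incurred in later periods are discounted relative to earlier ones.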