Add basic CI for overnight and myopic #234

Merged · 18 commits · Apr 11, 2022
Changes from all commits
109 changes: 109 additions & 0 deletions .github/workflows/ci.yaml
@@ -0,0 +1,109 @@
# SPDX-FileCopyrightText: : 2021 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: CC0-1.0

name: CI

# Caching method based on and described by:
# epassaro (2021): https://dev.to/epassaro/caching-anaconda-environments-in-github-actions-5hde
# and code in GitHub repo: https://github.com/epassaro/cache-conda-envs

on:
  push:
    branches:
    - master
  pull_request:
    branches:
    - master
  schedule:
  - cron: "0 5 * * TUE"

env:
  CONDA_CACHE_NUMBER: 1  # Change this value to manually reset the environment cache
  DATA_CACHE_NUMBER: 1

jobs:
  build:

    strategy:
      matrix:
        include:
        # Matrix required to handle caching with Mambaforge
        - os: ubuntu-latest
          label: ubuntu-latest
          prefix: /usr/share/miniconda3/envs/pypsa-eur

        # - os: macos-latest
        #   label: macos-latest
        #   prefix: /Users/runner/miniconda3/envs/pypsa-eur

        # - os: windows-latest
        #   label: windows-latest
        #   prefix: C:\Miniconda3\envs\pypsa-eur

    name: ${{ matrix.label }}

    runs-on: ${{ matrix.os }}

    defaults:
      run:
        shell: bash -l {0}

    steps:
    - uses: actions/checkout@v2

    - name: Clone pypsa-eur and technology-data repositories
      run: |
        git clone https://github.com/pypsa/pypsa-eur ../pypsa-eur
        git clone https://github.com/pypsa/technology-data ../technology-data
        cp ../pypsa-eur/test/config.test1.yaml ../pypsa-eur/config.yaml

    - name: Setup secrets
      run: |
        echo -ne "url: ${CDSAPI_URL}\nkey: ${CDSAPI_TOKEN}\n" > ~/.cdsapirc

    - name: Add solver to environment
      run: |
        echo -e " - coincbc\n - ipopt<3.13.3" >> ../pypsa-eur/envs/environment.yaml

    - name: Setup Mambaforge
      uses: conda-incubator/setup-miniconda@v2
      with:
        miniforge-variant: Mambaforge
        miniforge-version: latest
        activate-environment: pypsa-eur
        use-mamba: true

    - name: Set cache dates
      run: |
        echo "DATE=$(date +'%Y%m%d')" >> $GITHUB_ENV
        echo "WEEK=$(date +'%Y%U')" >> $GITHUB_ENV

    - name: Cache data and cutouts folders
      uses: actions/cache@v3
      with:
        path: |
          data
          ../pypsa-eur/cutouts
          ../pypsa-eur/data
        key: data-cutouts-${{ env.WEEK }}-${{ env.DATA_CACHE_NUMBER }}

    - name: Create environment cache
      uses: actions/cache@v2
      id: cache
      with:
        path: ${{ matrix.prefix }}
        key: ${{ matrix.label }}-conda-${{ env.DATE }}-${{ env.CONDA_CACHE_NUMBER }}

    - name: Update environment due to outdated or unavailable cache
      run: mamba env update -n pypsa-eur -f ../pypsa-eur/envs/environment.yaml
      if: steps.cache.outputs.cache-hit != 'true'

    - name: Test snakemake workflow
      run: |
        conda activate pypsa-eur
        conda list
        cp test/config.overnight.yaml config.yaml
        snakemake -call solve_all_networks
        cp test/config.myopic.yaml config.yaml
        snakemake -call solve_all_networks
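The two caches above deliberately expire on different schedules: the conda environment cache key embeds the current date, so a stale environment survives at most a day, while the data/cutouts cache key embeds the calendar week. A minimal Python sketch of how the keys roll over, mirroring the counters and the ubuntu-latest label from the workflow:

from datetime import date

# Mirrors CONDA_CACHE_NUMBER / DATA_CACHE_NUMBER in the workflow's env block;
# bumping either value invalidates the corresponding cache immediately.
CONDA_CACHE_NUMBER = 1
DATA_CACHE_NUMBER = 1

today = date.today()

# Environment cache: misses once per day, after which the workflow runs
# `mamba env update` and saves the refreshed environment under the new key.
conda_key = f"ubuntu-latest-conda-{today:%Y%m%d}-{CONDA_CACHE_NUMBER}"

# Data/cutouts cache: %U is the week of the year, so this key changes weekly.
data_key = f"data-cutouts-{today:%Y%U}-{DATA_CACHE_NUMBER}"

print(conda_key)  # e.g. ubuntu-latest-conda-20220411-1
print(data_key)   # e.g. data-cutouts-202215-1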
26 changes: 15 additions & 11 deletions Snakefile
@@ -45,18 +45,22 @@ rule prepare_sector_networks:
             **config['scenario'])

 datafiles = [
-    "eea/UNFCCC_v23.csv",
-    "switzerland-sfoe/switzerland-new_format.csv",
-    "nuts/NUTS_RG_10M_2013_4326_LEVL_2.geojson",
-    "myb1-2017-nitro.xls",
-    "Industrial_Database.csv",
-    "emobility/KFZ__count",
-    "emobility/Pkw__count",
+    "data/eea/UNFCCC_v23.csv",
+    "data/switzerland-sfoe/switzerland-new_format.csv",
+    "data/nuts/NUTS_RG_10M_2013_4326_LEVL_2.geojson",
+    "data/myb1-2017-nitro.xls",
+    "data/Industrial_Database.csv",
+    "data/emobility/KFZ__count",
+    "data/emobility/Pkw__count",
+    "data/h2_salt_caverns_GWh_per_sqkm.geojson",
+    directory("data/eurostat-energy_balances-june_2016_edition"),
+    directory("data/eurostat-energy_balances-may_2018_edition"),
+    directory("data/jrc-idees-2015"),
 ]

 if config.get('retrieve_sector_databundle', True):
     rule retrieve_sector_databundle:
-        output: expand('data/{file}', file=datafiles)
+        output: *datafiles
         log: "logs/retrieve_sector_databundle.log"
         script: 'scripts/retrieve_sector_databundle.py'
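Since each entry now carries its data/ prefix, and some entries are directory() objects rather than plain strings, the rule unpacks the list directly with *datafiles instead of re-prefixing it through expand(). A small sketch of the equivalence for the plain-string case, assuming a snakemake installation:

from snakemake.io import expand

# Before: bare file names, prefixed inside the rule via expand().
old_datafiles = ["eea/UNFCCC_v23.csv", "Industrial_Database.csv"]
prefixed = expand('data/{file}', file=old_datafiles)
assert prefixed == ["data/eea/UNFCCC_v23.csv", "data/Industrial_Database.csv"]

# After: entries already carry the prefix, so `output: *datafiles` unpacks
# them as-is, which also leaves directory(...) entries untouched.
datafiles = ["data/eea/UNFCCC_v23.csv", "data/Industrial_Database.csv"]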

@@ -252,9 +256,9 @@ rule build_biomass_potentials:
         enspreso_biomass=HTTP.remote("https://cidportal.jrc.ec.europa.eu/ftp/jrc-opendata/ENSPRESO/ENSPRESO_BIOMASS.xlsx", keep_local=True),
         nuts2="data/nuts/NUTS_RG_10M_2013_4326_LEVL_2.geojson", # https://gisco-services.ec.europa.eu/distribution/v2/nuts/download/#nuts21
         regions_onshore=pypsaeur("resources/regions_onshore_elec_s{simpl}_{clusters}.geojson"),
-        nuts3_population="../pypsa-eur/data/bundle/nama_10r_3popgdp.tsv.gz",
-        swiss_cantons="../pypsa-eur/data/bundle/ch_cantons.csv",
-        swiss_population="../pypsa-eur/data/bundle/je-e-21.03.02.xls",
+        nuts3_population=pypsaeur("data/bundle/nama_10r_3popgdp.tsv.gz"),
+        swiss_cantons=pypsaeur("data/bundle/ch_cantons.csv"),
+        swiss_population=pypsaeur("data/bundle/je-e-21.03.02.xls"),
         country_shapes=pypsaeur('resources/country_shapes.geojson')
     output:
         biomass_potentials_all='resources/biomass_potentials_all_s{simpl}_{clusters}.csv',
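Routing the pypsa-eur bundle files through pypsaeur(...) rather than hard-coded ../pypsa-eur/ paths lets snakemake resolve them against the upstream workflow and register the dependency. A sketch of the pattern, assuming pypsaeur is the subworkflow handle this repository declares elsewhere in the Snakefile (the declaration below is an assumption, not part of this diff):

# Assumed declaration (Snakemake DSL), not shown in this diff:
subworkflow pypsaeur:
    workdir: "../pypsa-eur"
    snakefile: "../pypsa-eur/Snakefile"

rule uses_bundle_file:
    input:
        # Resolves to ../pypsa-eur/data/bundle/ch_cantons.csv and makes the
        # pypsa-eur workflow responsible for providing it.
        swiss_cantons=pypsaeur("data/bundle/ch_cantons.csv")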
4 changes: 3 additions & 1 deletion scripts/build_biomass_potentials.py
@@ -144,7 +144,9 @@ def build_nuts2_shapes():
     nuts2 = gpd.GeoDataFrame(gpd.read_file(snakemake.input.nuts2).set_index('id').geometry)

     countries = gpd.read_file(snakemake.input.country_shapes).set_index('name')
-    missing = countries.loc[["AL", "RS", "BA"]]
+    missing_iso2 = countries.index.intersection(["AL", "RS", "BA"])
+    missing = countries.loc[missing_iso2]

     nuts2.rename(index={"ME00": "ME", "MK00": "MK"}, inplace=True)

     return nuts2.append(missing)
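Under recent pandas, countries.loc[["AL", "RS", "BA"]] raises a KeyError as soon as one of the three labels is missing, which is exactly what happens with the clipped country selection in the CI test configs. Intersecting with the index first keeps only the labels that exist. A minimal pandas sketch:

import pandas as pd

# Hypothetical country table covering only part of the Balkans.
countries = pd.DataFrame({"geometry": ["poly_AL", "poly_DE"]}, index=["AL", "DE"])

# countries.loc[["AL", "RS", "BA"]]  # KeyError: "RS" and "BA" are absent

missing_iso2 = countries.index.intersection(["AL", "RS", "BA"])
missing = countries.loc[missing_iso2]  # selects just the "AL" row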
54 changes: 30 additions & 24 deletions scripts/prepare_sector_network.py
Expand Up @@ -252,6 +252,7 @@ def create_network_topology(n, prefix, carriers=["DC"], connector=" -> ", bidire

ln_attrs = ["bus0", "bus1", "length"]
lk_attrs = ["bus0", "bus1", "length", "underwater_fraction"]
lk_attrs = n.links.columns.intersection(lk_attrs)

candidates = pd.concat([
n.lines[ln_attrs],
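The added intersection guards against reduced test networks whose n.links table lacks optional columns such as underwater_fraction; without it, n.links[lk_attrs] would raise a KeyError. A minimal sketch with a bare pandas frame standing in for n.links:

import pandas as pd

# Stand-in for n.links in a small test network: the optional
# "underwater_fraction" column was never filled in.
links = pd.DataFrame(columns=["bus0", "bus1", "length"])

lk_attrs = ["bus0", "bus1", "length", "underwater_fraction"]
lk_attrs = links.columns.intersection(lk_attrs)  # drops the absent column

candidates = links[lk_attrs]  # safe: only existing columns are selected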
@@ -1045,18 +1046,20 @@ def add_storage_and_grids(n, costs):
     )

     cavern_types = snakemake.config["sector"]["hydrogen_underground_storage_locations"]
-    h2_caverns = pd.read_csv(snakemake.input.h2_cavern, index_col=0)[cavern_types].sum(axis=1)
-
-    # only use sites with at least 2 TWh potential
-    h2_caverns = h2_caverns[h2_caverns > 2]
+    h2_caverns = pd.read_csv(snakemake.input.h2_cavern, index_col=0)

-    # convert TWh to MWh
-    h2_caverns = h2_caverns * 1e6
+    if not h2_caverns.empty and options['hydrogen_underground_storage']:
+
+        h2_caverns = h2_caverns[cavern_types].sum(axis=1)

-    # clip at 1000 TWh for one location
-    h2_caverns.clip(upper=1e9, inplace=True)
+        # only use sites with at least 2 TWh potential
+        h2_caverns = h2_caverns[h2_caverns > 2]
+
+        # convert TWh to MWh
+        h2_caverns = h2_caverns * 1e6

-    if options['hydrogen_underground_storage']:
+        # clip at 1000 TWh for one location
+        h2_caverns.clip(upper=1e9, inplace=True)

         logger.info("Add hydrogen underground storage")
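Reading the CSV first and deferring the column selection until after an emptiness check lets a test network whose clipped region contains no salt caverns skip the whole storage block. A sketch of the guarded flow, with an inline frame standing in for the CSV and a plain boolean standing in for options['hydrogen_underground_storage']:

import pandas as pd

# Stand-in for pd.read_csv(snakemake.input.h2_cavern, index_col=0); in the
# clipped CI configs this table can come back with zero rows.
h2_caverns = pd.DataFrame(columns=["onshore", "nearshore", "offshore"])
cavern_types = ["onshore", "nearshore"]  # hypothetical config value
hydrogen_underground_storage = True      # stands in for options[...]

if not h2_caverns.empty and hydrogen_underground_storage:
    potential = h2_caverns[cavern_types].sum(axis=1)  # TWh per location
    potential = potential[potential > 2]              # keep sites above 2 TWh
    potential = (potential * 1e6).clip(upper=1e9)     # TWh to MWh, cap at 1000 TWh
else:
    print("no cavern sites in scope; skipping hydrogen underground storage")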

@@ -1155,23 +1158,26 @@ def add_storage_and_grids(n, costs):
         # apply k_edge_augmentation weighted by length of complement edges
         k_edge = options.get("gas_network_connectivity_upgrade", 3)
         augmentation = k_edge_augmentation(G, k_edge, avail=complement_edges.values)
-        new_gas_pipes = pd.DataFrame(augmentation, columns=["bus0", "bus1"])
-        new_gas_pipes["length"] = new_gas_pipes.apply(haversine, axis=1)

-        new_gas_pipes.index = new_gas_pipes.apply(
-            lambda x: f"gas pipeline new {x.bus0} <-> {x.bus1}", axis=1)
+        if list(augmentation):

-        n.madd("Link",
-            new_gas_pipes.index,
-            bus0=new_gas_pipes.bus0 + " gas",
-            bus1=new_gas_pipes.bus1 + " gas",
-            p_min_pu=-1, # new gas pipes are bidirectional
-            p_nom_extendable=True,
-            length=new_gas_pipes.length,
-            capital_cost=new_gas_pipes.length * costs.at['CH4 (g) pipeline', 'fixed'],
-            carrier="gas pipeline new",
-            lifetime=costs.at['CH4 (g) pipeline', 'lifetime']
-        )
+            new_gas_pipes = pd.DataFrame(augmentation, columns=["bus0", "bus1"])
+            new_gas_pipes["length"] = new_gas_pipes.apply(haversine, axis=1)
+
+            new_gas_pipes.index = new_gas_pipes.apply(
+                lambda x: f"gas pipeline new {x.bus0} <-> {x.bus1}", axis=1)
+
+            n.madd("Link",
+                new_gas_pipes.index,
+                bus0=new_gas_pipes.bus0 + " gas",
+                bus1=new_gas_pipes.bus1 + " gas",
+                p_min_pu=-1, # new gas pipes are bidirectional
+                p_nom_extendable=True,
+                length=new_gas_pipes.length,
+                capital_cost=new_gas_pipes.length * costs.at['CH4 (g) pipeline', 'fixed'],
+                carrier="gas pipeline new",
+                lifetime=costs.at['CH4 (g) pipeline', 'lifetime']
+            )

         if options["H2_retrofit"]:
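The guard skips the augmentation block when the test network's gas graph needs no extra edges. One caveat worth flagging: networkx's k_edge_augmentation returns a generator, so truth-testing it via list(...) consumes it before pd.DataFrame can read any rows; a defensive variant materializes the list once and reuses it. A sketch assuming the same networkx API:

import networkx as nx

# Two disconnected gas buses: 1-edge-connectivity needs one extra edge.
G = nx.Graph([("DE", "FR"), ("ES", "PT")])

# Materialize once; k_edge_augmentation yields edges lazily, and a bare
# `if list(augmentation):` would exhaust the generator prematurely.
augmentation = list(nx.k_edge_augmentation(G, k=1))

if augmentation:
    print(augmentation)  # e.g. [("FR", "ES")], the complementing edge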
