45 changes: 32 additions & 13 deletions .github/workflows/test.yaml
@@ -40,12 +40,14 @@ jobs:
- name: Setup env file path (ubuntu)
if: matrix.os == 'ubuntu'
run: |
echo "env_file=envs/linux-pinned.yaml" >> $GITHUB_ENV
# echo "env_file=envs/linux-pinned.yaml" >> $GITHUB_ENV
echo "env_file=${{ env.BASE_ENV }}" >> $GITHUB_ENV # TODO Remove

- name: Setup env file path (macos and windows)
if: matrix.os != 'ubuntu'
run: |
echo "env_file=envs/${{ matrix.os }}-pinned.yaml" >> $GITHUB_ENV
echo "env_file=${{ env.BASE_ENV }}" >> $GITHUB_ENV # TODO Remove

- name: Use base env file if it was changed
run: |
@@ -60,7 +62,7 @@ jobs:
- name: Setup cache keys
run: |
echo "WEEK=$(date +'%Y%U')" >> $GITHUB_ENV # data and cutouts

- uses: actions/cache@v4
with:
path: |
@@ -71,7 +73,7 @@
- uses: conda-incubator/setup-miniconda@v3
with:
miniforge-version: latest
- activate-environment: pypsa-eur
+ activate-environment: pypsa-de
channel-priority: strict

- name: Cache Conda env
@@ -84,13 +86,21 @@
- name: Update environment
if: steps.cache-env.outputs.cache-hit != 'true'
run: |
- conda env update -n pypsa-eur -f ${{ env.env_file }}
+ conda env update -n pypsa-de -f ${{ env.env_file }}
echo "Run conda list" && conda list

- - name: Run pylint check on scripts
-   # check for undefined variables to reuse functions across scripts
-   run: |
-     pylint --disable=all --enable=E0601 --output-format=parseable scripts/add_* scripts/prepare_* scripts/solve_*
+ - name: Setup ixmp4 access
+   run: |
+     mkdir -p ~/.local/share/ixmp4/
+     cat > ~/.local/share/ixmp4/credentials.toml << 'EOL'
+     ${{ secrets.IXMP4_ACCESS }}
+     EOL
+   shell: bash
+
+ # - name: Run pylint check on scripts
+ #   # check for undefined variables to reuse functions across scripts
+ #   run: |
+ #     pylint --disable=all --enable=E0601,E0606 --output-format=parseable scripts/add_* scripts/prepare_* scripts/solve_*

- name: Run snakemake test workflows
run: |
@@ -138,7 +148,8 @@ jobs:
echo "env_file=${{ env.BASE_ENV }}" >> $GITHUB_ENV
else
echo "Base env ${{ env.BASE_ENV }} not changed. Using pinned envs."
echo "env_file=envs/linux-pinned.yaml" >> $GITHUB_ENV
# echo "env_file=envs/linux-pinned.yaml" >> $GITHUB_ENV
echo "env_file=${{ env.BASE_ENV }}" >> $GITHUB_ENV # TODO Remove
fi

# Only run checks if package is not pinned
@@ -150,7 +161,7 @@
else
echo "pinned=false" >> $GITHUB_ENV
fi

- name: Setup cache keys
if: env.pinned == 'false'
run: |
@@ -167,7 +178,7 @@
- uses: conda-incubator/setup-miniconda@v3
if: env.pinned == 'false'
with:
- activate-environment: pypsa-eur
+ activate-environment: pypsa-de

- name: Cache Conda env
if: env.pinned == 'false'
@@ -180,9 +191,17 @@
- name: Update environment
if: env.pinned == 'false' && steps.cache-env.outputs.cache-hit != 'true'
run: |
- conda env update -n pypsa-eur -f ${{ env.env_file }}
+ conda env update -n pypsa-de -f ${{ env.env_file }}
echo "Run conda list" && conda list

+ - name: Setup ixmp4 access
+   run: |
+     mkdir -p ~/.local/share/ixmp4/
+     cat > ~/.local/share/ixmp4/credentials.toml << 'EOL'
+     ${{ secrets.IXMP4_ACCESS }}
+     EOL
+   shell: bash

- name: Install inhouse packages from master
if: env.pinned == 'false'
run: |
@@ -202,4 +221,4 @@ jobs:
logs
.snakemake/log
results
- retention-days: 3
+ retention-days: 3
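
A note on the new `Setup ixmp4 access` steps: the quoted `'EOL'` heredoc delimiter keeps the shell from expanding anything inside the secret, while `${{ secrets.IXMP4_ACCESS }}` is substituted by the Actions runner before the script executes. A minimal sketch of a post-write sanity check, assuming Python >= 3.11 (stdlib `tomllib`); the check itself is not part of this PR:

```python
import sys
import tomllib  # stdlib TOML parser since Python 3.11
from pathlib import Path

creds = Path.home() / ".local" / "share" / "ixmp4" / "credentials.toml"

if not creds.exists():
    sys.exit(f"missing {creds} -- was the IXMP4_ACCESS secret written?")

with creds.open("rb") as f:
    data = tomllib.load(f)  # raises tomllib.TOMLDecodeError if the secret is not valid TOML

print(f"parsed {creds}: sections = {list(data)}")
```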
9 changes: 9 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,9 @@
ci:
autoupdate_schedule: monthly

repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: check-added-large-files
args: ['--maxkb=2000']
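
The `check-added-large-files` hook rejects newly added files above `--maxkb`. A rough sketch of the equivalent check outside pre-commit (assuming `git` is on PATH; the 2000 kB cap mirrors the config above, and `--diff-filter=A` approximates the hook's "added files only" scope):

```python
import subprocess
from pathlib import Path

MAX_KB = 2000  # mirrors --maxkb=2000 in .pre-commit-config.yaml

# files newly added and staged for the next commit
staged = subprocess.run(
    ["git", "diff", "--cached", "--name-only", "--diff-filter=A"],
    capture_output=True, text=True, check=True,
).stdout.split()

too_big = [p for p in staged if Path(p).is_file() and Path(p).stat().st_size > MAX_KB * 1024]
if too_big:
    raise SystemExit(f"files exceed {MAX_KB} kB: {too_big}")
```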
12 changes: 6 additions & 6 deletions Makefile
@@ -54,11 +54,11 @@ install-pinned-macos: _conda_check
# Run default tests
test:
set -e
- snakemake solve_elec_networks --configfile config/test/config.electricity.yaml
- snakemake --configfile config/test/config.overnight.yaml
- snakemake --configfile config/test/config.myopic.yaml
- snakemake make_summary_perfect --configfile config/test/config.perfect.yaml
- snakemake --configfile config/test/config.scenarios.yaml -n
+ echo "Running tests..."
+ echo "Build scenarios..."
+ snakemake build_scenarios
+ echo "Run DACH config..."
+ snakemake ariadne_all --configfile=config/test/config.dach.yaml
+ echo "All tests completed successfully."

unit-test:
@@ -83,4 +83,4 @@ reset:
rm -r ./.snakemake || true; \
rm ./config/config.yaml || true; \
echo "Reset completed." \
) || echo "Reset cancelled."
) || echo "Reset cancelled."
57 changes: 57 additions & 0 deletions config/test/config.dach.yaml
@@ -0,0 +1,57 @@
# SPDX-FileCopyrightText: Contributors to PyPSA-DE <https://github.com/pypsa/pypsa-eur>
#
# SPDX-License-Identifier: CC0-1.0

run:
prefix: "test-sector-myopic-dach"
name:
- KN2045_Bal_v4

scenario:
clusters:
- 5 #current options: 27, 49
countries: ['DE', 'AT', 'CH']

snapshots:
start: "2013-03-01"
end: "2013-03-08"

atlite:
default_cutout: dach-03-2013-sarah3-era5
cutouts:
dach-03-2013-sarah3-era5:
module: [sarah, era5] # in priority order
x: [5., 18.]
y: [45., 56.]
time: ["2013-03-01", "2013-03-08"]

renewable:
onwind:
cutout: dach-03-2013-sarah3-era5
offwind-ac:
cutout: dach-03-2013-sarah3-era5
max_depth: false
offwind-dc:
cutout: dach-03-2013-sarah3-era5
max_depth: false
offwind-float:
cutout: dach-03-2013-sarah3-era5
max_depth: false
min_depth: false
solar:
cutout: dach-03-2013-sarah3-era5
solar-hsat:
cutout: dach-03-2013-sarah3-era5

clustering:
focus_weights: []
temporal:
resolution_sector: 3H

electricity:
renewable_carriers: [solar, solar-hsat, onwind, offwind-ac, offwind-dc] # removed hydro, offwind-float

solving:
solver:
name: highs
options: highs-simplex
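
The `dach-03-2013-sarah3-era5` cutout spans x in [5, 18], y in [45, 56] for one week of March 2013. For reference, a sketch of how such a cutout is typically prepared with atlite (assuming atlite is installed and CDS API credentials are configured for ERA5; the sarah module additionally needs local SARAH-3 data — in this test config the cutout is retrieved pre-built instead):

```python
import atlite

cutout = atlite.Cutout(
    path="cutouts/dach-03-2013-sarah3-era5.nc",
    module=["sarah", "era5"],                # in priority order, as in the config
    x=slice(5.0, 18.0),                      # longitude bounds of the DACH box
    y=slice(45.0, 56.0),                     # latitude bounds
    time=slice("2013-03-01", "2013-03-08"),  # matches the snapshots section
)
cutout.prepare()  # builds the weather data; a no-op if the file already exists
```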
2 changes: 1 addition & 1 deletion rules/retrieve.smk
@@ -136,7 +136,7 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_cutout", True
rule retrieve_cutout:
input:
storage(
"https://zenodo.org/records/14936211/files/{cutout}.nc",
"https://zenodo.org/records/15130997/files/{cutout}.nc",
),
output:
CDIR + "{cutout}.nc",
8 changes: 6 additions & 2 deletions scripts/prepare_sector_network.py
@@ -3202,6 +3202,7 @@ def add_biomass(
pop_layout,
biomass_potentials_file,
biomass_transport_costs_file=None,
+ nyears=1,
):
"""
Add biomass-related components to the PyPSA network.
@@ -3235,6 +3236,8 @@
biomass_transport_costs_file : str, optional
Path to CSV file containing biomass transport costs data.
Required if biomass_transport or biomass_spatial options are True.
+ nyears : float
+     Number of years for which to scale the biomass potentials.

Returns
-------
@@ -3254,7 +3257,7 @@
"""
logger.info("Add biomass")

- biomass_potentials = pd.read_csv(biomass_potentials_file, index_col=0)
+ biomass_potentials = pd.read_csv(biomass_potentials_file, index_col=0) * nyears

# need to aggregate potentials if gas not nodally resolved
if (
@@ -3377,7 +3380,7 @@ def add_biomass(
if options["solid_biomass_import"].get("enable", False):
biomass_import_price = options["solid_biomass_import"]["price"]
# convert TWh in MWh
- biomass_import_max_amount = options["solid_biomass_import"]["max_amount"] * 1e6
+ biomass_import_max_amount = options["solid_biomass_import"]["max_amount"] * 1e6 * nyears
biomass_import_upstream_emissions = options["solid_biomass_import"][
"upstream_emissions_factor"
]
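
Both `nyears` scalings above convert annual quantities (MWh/a) to the length of the optimisation horizon. In PyPSA the usual idiom derives `nyears` from the snapshot weightings; a self-contained sketch with hypothetical numbers (a one-week test run gives nyears = 168/8760, roughly 0.019):

```python
import pandas as pd

# one week of 3-hourly snapshots, each weighted with 3 hours
snapshots = pd.date_range("2013-03-01", "2013-03-08", freq="3h", inclusive="left")
weightings = pd.Series(3.0, index=snapshots)

nyears = weightings.sum() / 8760  # 168 h / 8760 h ~ 0.0192
annual_potential_mwh = 1.0e6      # hypothetical 1 TWh/a biomass potential
print(f"nyears = {nyears:.4f}, scaled potential = {annual_potential_mwh * nyears:,.0f} MWh")
```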
@@ -5568,6 +5571,7 @@ def add_enhanced_geothermal(
pop_layout=pop_layout,
biomass_potentials_file=snakemake.input.biomass_potentials,
biomass_transport_costs_file=snakemake.input.biomass_transport_costs,
+ nyears=nyears,
)

if options["ammonia"]:
5 changes: 4 additions & 1 deletion scripts/pypsa-de/additional_functionality.py
@@ -290,7 +290,10 @@ def h2_production_limits(n, investment_year, limits_volume_min, limits_volume_max):

def electricity_import_limits(n, investment_year, limits_volume_max):
for ct in limits_volume_max["electricity_import"]:
- limit = limits_volume_max["electricity_import"][ct][investment_year] * 1e6
+ limit = limits_volume_max["electricity_import"][ct][investment_year] * 1e6
+
+ if limit < 0:
+     limit *= n.snapshot_weightings.generators.sum() / 8760

logger.info(f"limiting electricity imports in {ct} to {limit / 1e6} TWh/a")

14 changes: 9 additions & 5 deletions scripts/pypsa-de/export_ariadne_variables.py
@@ -386,7 +386,8 @@ def fill_if_lifetime_inf(n, carrier, lifetime, component="links"):

for component in ["lines", "links", "generators", "stores", "storage_units"]:
df = getattr(n, component)

+ if df.empty:
+     continue
decentral_idx = df.index[df.index.str.contains("decentral|rural|rooftop")]
not_decentral_idx = df.index[~df.index.str.contains("decentral|rural|rooftop")]

Expand Down Expand Up @@ -1546,9 +1547,12 @@ def get_secondary_energy(n, region, _industry_demand):
+ var["Secondary Energy|Electricity|Biomass|w/ CCS"]
)

var["Secondary Energy|Electricity|Hydro"] = electricity_supply.get(
"hydro"
) + electricity_supply.get("ror")
var["Secondary Energy|Electricity|Hydro"] = electricity_supply.reindex(
[
"PHS",
"hydro",
]
).sum()
# ! Neglecting PHS here because it is storage infrastructure

var["Secondary Energy|Electricity|Nuclear"] = electricity_supply.filter(
@@ -2878,7 +2882,7 @@ def get_emissions(n, region, _energy_totals, industry_demand):
# E and Biofuels with CC
var["Carbon Sequestration|Other"] = co2_storage.mul(ccs_fraction)[
~co2_storage.index.str.contains("bio|process")
- ].sum() + co2_storage.mul(ccs_fraction).get("process emissions CC") * (
+ ].sum() + co2_storage.mul(ccs_fraction).get("process emissions CC", 0) * (
1 - pe_fossil_fraction
)
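
Both fixes in this file guard against labels that are absent from a given region's supply series: `Series.get(key, 0)` returns the default instead of `None` (which would raise a `TypeError` in the subsequent arithmetic), and `reindex([...]).sum()` inserts NaN for missing labels, which `sum()` skips. A minimal sketch of the two idioms:

```python
import pandas as pd

supply = pd.Series({"hydro": 12.0})  # hypothetical region with no "PHS" entry

# reindex inserts NaN for missing labels; sum() skips NaN by default
hydro = supply.reindex(["PHS", "hydro"]).sum()  # 12.0, no KeyError

# .get with a default avoids None propagating into arithmetic
process_cc = supply.get("process emissions CC", 0)  # 0 instead of None
print(hydro, process_cc)
```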

Expand Down
10 changes: 7 additions & 3 deletions scripts/pypsa-de/modify_prenetwork.py
@@ -839,8 +839,12 @@ def aladin_mobility_demand(n):
# get aladin data
aladin_demand = pd.read_csv(snakemake.input.aladin_demand, index_col=0)

+ simulation_period_correction_factor = (
+     n.snapshot_weightings.objective.sum() / 8760
+ )
+
# oil demand
- oil_demand = aladin_demand.Liquids
+ oil_demand = aladin_demand.Liquids * simulation_period_correction_factor
oil_index = n.loads[
(n.loads.carrier == "land transport oil") & (n.loads.index.str[:2] == "DE")
].index
@@ -853,7 +857,7 @@
)

# hydrogen demand
- h2_demand = aladin_demand.Hydrogen
+ h2_demand = aladin_demand.Hydrogen * simulation_period_correction_factor
h2_index = n.loads[
(n.loads.carrier == "land transport fuel cell")
& (n.loads.index.str[:2] == "DE")
@@ -867,7 +871,7 @@
)

# electricity demand
- ev_demand = aladin_demand.Electricity
+ ev_demand = aladin_demand.Electricity * simulation_period_correction_factor
ev_index = n.loads[
(n.loads.carrier == "land transport EV") & (n.loads.index.str[:2] == "DE")
].index
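
The ALADIN mobility demands are annual totals, so all three carriers (Liquids, Hydrogen, Electricity) are scaled by the same period correction factor before being written to the German loads. A self-contained sketch of the pattern with mock data (column names follow the diff; values are hypothetical):

```python
import pandas as pd

# hypothetical annual ALADIN demands (MWh/a)
aladin_demand = pd.DataFrame(
    {"Liquids": [1.0e6], "Hydrogen": [2.0e5], "Electricity": [5.0e5]},
    index=["DE"],
)

snapshot_hours = 168.0  # n.snapshot_weightings.objective.sum() for a one-week run
factor = snapshot_hours / 8760

# each annual demand column is scaled by the same correction factor
for column in ["Liquids", "Hydrogen", "Electricity"]:
    scaled = aladin_demand[column] * factor
    print(f"{column}: {scaled.loc['DE']:,.0f} MWh over the simulated week")
```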