# NOTE(review): removed scraped GitHub web-UI residue that preceded the workflow
# ("Skip to content", run title "Test models #159", "Workflow file for this run").
# It was copy-paste artifact from the Actions run page, not part of the file,
# and made the document invalid YAML.
# SPDX-FileCopyrightText: PyPSA Contributors
#
# SPDX-License-Identifier: MIT

# Nightly / per-change integration workflow: runs downstream model test suites
# (PyPSA-Eur) against the current PyPSA source tree.
name: Test models

on:
  push:
    branches:
      - master
      - release-branch-v*
  pull_request:
    branches: ['*']
  schedule:
    # Daily at 05:00 UTC so downstream breakage is caught even without pushes.
    - cron: "0 5 * * *"

env:
  # Conda/PyPI name of the in-house package that gets swapped for the dev ref.
  PACKAGE_NAME: 'pypsa'

# Cancel any in-progress runs when a new run is triggered
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  # Run the PyPSA-Eur model's integration and unit test suites against the
  # PyPSA revision that triggered this workflow.
  test-pypsa-eur:
    name: PyPSA-Eur
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        version:
          - master
          # - latest
    defaults:
      run:
        shell: bash -l {0}
    steps:
      # First checkout: this (PyPSA) repository, only needed so paths-filter
      # can inspect the diff of the triggering commit/PR.
      - uses: actions/checkout@v6
      - uses: dorny/paths-filter@v3
        id: filter
        with:
          filters: |
            src:
              - 'pypsa/**'
              - 'pyproject.toml'
              - '.github/workflows/test-models.yml'
      - name: Skip - no source changes
        if: steps.filter.outputs.src != 'true' && github.event_name != 'schedule'
        run: |
          echo "No source code changes detected, skipping pypsa-eur tests"
      - name: Free up disk space
        if: steps.filter.outputs.src == 'true' || github.event_name == 'schedule'
        run: |
          echo "Initial disk space"
          df -h
          echo "Free up disk space"
          sudo rm -rf /usr/share/dotnet /usr/local/lib/android /opt/ghc /opt/hostedtoolcache/CodeQL
          sudo docker image prune --all --force
          sudo docker builder prune -a --force
          echo "Final disk space"
          df -h
      # Second checkout: replaces the workspace with the downstream model repo.
      - uses: actions/checkout@v6
        if: steps.filter.outputs.src == 'true' || github.event_name == 'schedule'
        with:
          repository: PyPSA/pypsa-eur
          ref: master
      - name: Check out latest release
        if: (steps.filter.outputs.src == 'true' || github.event_name == 'schedule') && matrix.version == 'latest'
        run: |
          git fetch --tags
          latest_tag=$(git describe --tags `git rev-list --tags --max-count=1`)
          git checkout $latest_tag
      - name: Setup Pixi
        if: steps.filter.outputs.src == 'true' || github.event_name == 'schedule'
        uses: prefix-dev/setup-pixi@v0.9.3
        with:
          pixi-version: v0.59.0
          cache: true
          # Do not cache in branches.
          # FIX: was `github.ref_name == 'main'`, which is never true — the push
          # trigger only fires on 'master' and 'release-branch-v*', so the Pixi
          # cache was never written. Compare against 'master' instead.
          cache-write: ${{ github.event_name == 'push' && github.ref_name == 'master' }}
      - name: Setup cache keys
        if: steps.filter.outputs.src == 'true' || github.event_name == 'schedule'
        run: |
          echo "WEEK=$(date +'%Y%U')" >> $GITHUB_ENV # data and cutouts
      # Only run check if package is not pinned
      - name: Check if inhouse package is pinned
        if: steps.filter.outputs.src == 'true' || github.event_name == 'schedule'
        run: |
          grep_line=$(grep -- '- pypsa' envs/environment.yaml)
          if [[ $grep_line == *"<"* || $grep_line == *"=="* ]]; then
            echo "pinned=true" >> $GITHUB_ENV
          else
            echo "pinned=false" >> $GITHUB_ENV
          fi
      - uses: actions/cache@v5
        if: (steps.filter.outputs.src == 'true' || github.event_name == 'schedule') && env.pinned == 'false'
        with:
          path: |
            data
            cutouts
          key: data-cutouts-pypsa-eur-${{ env.WEEK }}
      - name: Install package from ref
        if: (steps.filter.outputs.src == 'true' || github.event_name == 'schedule') && env.pinned == 'false'
        run: |
          pixi remove ${{ env.PACKAGE_NAME }}
          pixi add --pypi --git https://github.com/${{ github.repository }}.git ${{ github.event.repository.name }} --rev ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
      - name: Run snakemake test workflows
        if: (steps.filter.outputs.src == 'true' || github.event_name == 'schedule') && env.pinned == 'false'
        run: |
          pixi run integration-tests
      - name: Run unit tests
        if: (steps.filter.outputs.src == 'true' || github.event_name == 'schedule') && env.pinned == 'false'
        run: |
          pixi run unit-tests
      - name: Upload artifacts
        if: (steps.filter.outputs.src == 'true' || github.event_name == 'schedule') && env.pinned == 'false'
        uses: actions/upload-artifact@v6
        with:
          name: results-pypsa-eur-${{ matrix.version }}
          path: |
            logs
            .snakemake/log
            results
          retention-days: 3
      - name: Show remaining disk space
        if: always()
        run: df -h
# Temporarily disabled until PyPSA-DE issues are resolved. This is not a problem of PyPSA.
# test-pypsa-de:
# name: PyPSA-DE
# runs-on: ubuntu-latest
# strategy:
# fail-fast: false
# matrix:
# version:
# - master
# # - latest
# defaults:
# run:
# shell: bash -l {0}
# steps:
# - uses: actions/checkout@v6
# with:
# repository: PyPSA/pypsa-de
# ref: main
# - name: Check out latest release
# if: matrix.version == 'latest'
# run: |
# git fetch --tags
# latest_tag=$(git describe --tags `git rev-list --tags --max-count=1`)
# git checkout $latest_tag
# - name: Setup cache keys
# run: |
# echo "WEEK=$(date +'%Y%U')" >> $GITHUB_ENV # data and cutouts
# # Only run check if package is not pinned
# - name: Check if inhouse package is pinned
# run: |
# grep_line=$(grep -- '- pypsa' envs/environment.yaml)
# if [[ $grep_line == *"<"* || $grep_line == *"=="* ]]; then
# echo "pinned=true" >> $GITHUB_ENV
# else
# echo "pinned=false" >> $GITHUB_ENV
# fi
# - uses: actions/cache@v5
# if: env.pinned == 'false'
# with:
# path: |
# data
# cutouts
# resources/ariadne_database.csv
# key: data-cutouts-pypsa-de-${{ env.WEEK }}
# - uses: conda-incubator/setup-miniconda@v3
# if: env.pinned == 'false'
# with:
# miniforge-version: latest
# activate-environment: pypsa-de
# channel-priority: strict
# - name: Cache Conda env
# if: env.pinned == 'false'
# uses: actions/cache@v5
# with:
# path: ${{ env.CONDA }}/envs
# key: conda-pypsa-de-${{ env.WEEK }}-${{ hashFiles('envs/linux-64.lock.yaml') }}
# id: cache-env
# - name: Update environment
# if: env.pinned == 'false' && steps.cache-env.outputs.cache-hit != 'true'
# run: |
# conda env update -n pypsa-de -f envs/linux-64.lock.yaml
# echo "Run conda list" && conda list
# - name: Install package from ref
# if: env.pinned == 'false'
# run: |
# python -m pip install git+https://github.com/${{ github.repository }}@${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
# - name: Run snakemake test workflows
# if: env.pinned == 'false'
# run: |
# make test
# - name: Run unit tests
# if: env.pinned == 'false'
# run: |
# make unit-test
# - name: Upload artifacts
# if: env.pinned == 'false'
# uses: actions/upload-artifact@v6
# with:
# name: results-pypsa-de-${{ matrix.version }}
# path: |
# logs
# .snakemake/log
# results
# retention-days: 3