
Commit c10a10c

Fix and add pre-commit
1 parent 96c66e0

20 files changed (+51 / -235 lines)

.github/workflows/test.yaml

Lines changed: 4 additions & 4 deletions
(All four changed lines are visually identical before and after; these are whitespace-only fixes, e.g. stripped trailing whitespace and a final newline, applied by the pre-commit hooks.)

@@ -62,7 +62,7 @@ jobs:
       - name: Setup cache keys
         run: |
           echo "WEEK=$(date +'%Y%U')" >> $GITHUB_ENV # data and cutouts
-
+
       - uses: actions/cache@v4
         with:
           path: |
@@ -88,7 +88,7 @@ jobs:
         run: |
           conda env update -n pypsa-de -f ${{ env.env_file }}
           echo "Run conda list" && conda list
-
+
       - name: Setup ixmp4 access
         run: |
           mkdir -p ~/.local/share/ixmp4/
@@ -161,7 +161,7 @@ jobs:
           else
             echo "pinned=false" >> $GITHUB_ENV
           fi
-
+
       - name: Setup cache keys
         if: env.pinned == 'false'
         run: |
@@ -221,4 +221,4 @@ jobs:
             logs
             .snakemake/log
             results
-          retention-days: 3
+          retention-days: 3

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion
@@ -27,7 +27,7 @@ repos:
     rev: v2.4.1
     hooks:
       - id: codespell
-        args: ['--ignore-regex="(\b[A-Z]+\b)"', '--ignore-words-list=fom,appartment,bage,ore,setis,tabacco,berfore,vor,pris'] # Ignore capital case words, e.g. country codes
+        args: ['--ignore-regex="(\b[A-Z]+\b)"', '--ignore-words-list=fom,appartment,bage,ore,setis,tabacco,berfore,vor,pris,GuD,Dezember,Juni,Juli,Produktion,WorstCase'] # Ignore capital case words, e.g. country codes
         types_or: [python, rst, markdown]
         files: ^(scripts|doc)/
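For context: the hook's --ignore-regex only skips all-caps tokens such as country codes, so mixed-case German words (GuD, Dezember, Produktion, ...) fall through to the dictionary check and apparently had to be whitelisted via --ignore-words-list instead. A stand-alone illustration of what that regex does and does not match (not part of the commit):

import re

# Same pattern as in the codespell args above: runs of capital letters only.
ignore = re.compile(r"\b[A-Z]+\b")

for token in ["DE", "AT", "GuD", "Dezember", "Produktion"]:
    print(token, bool(ignore.fullmatch(token)))
# DE, AT -> True (covered by the regex)
# GuD, Dezember, Produktion -> False (hence the new --ignore-words-list entries)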

config/test/config.dach.yaml

Lines changed: 1 addition & 1 deletion
(The changed line is identical in visible content; this is a whitespace/end-of-file fix from the pre-commit hooks.)

@@ -54,4 +54,4 @@ electricity:
 solving:
   solver:
     name: highs
-    options: highs-simplex
+    options: highs-simplex

scripts/_helpers.py

Lines changed: 1 addition & 1 deletion
@@ -47,7 +47,7 @@ def get_scenarios(run):
         fn = Path(scenario_config["file"])
         if fn.exists():
             scenarios = yaml.safe_load(fn.read_text())
-            if scenarios == None:
+            if scenarios is None:
                 print(
                     "WARNING! Scenario management enabled but scenarios file appears to be empty."
                 )
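yaml.safe_load returns None for an empty file, so the check itself is sound, but "== None" goes through __eq__ (which objects can override and array-likes evaluate elementwise) and is flagged by flake8/ruff as E711; the identity check "is None" is the robust idiom. A stand-alone illustration, not taken from the repo:

import numpy as np


class Weird:
    def __eq__(self, other):
        return True  # __eq__ may claim equality with anything, including None


w = Weird()
print(w == None)  # True  -- misleading
print(w is None)  # False -- identity cannot be overridden

# Array-likes do not even give a single bool for "== None":
arr = np.array([1, 2, 3])
print(arr == None)  # elementwise: [False False False]
print(arr is None)  # False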

scripts/add_existing_baseyear.py

Lines changed: 3 additions & 5 deletions
@@ -23,6 +23,7 @@
     update_config_from_wildcards,
 )
 from scripts.add_electricity import sanitize_carriers
+from scripts.build_powerplants import add_custom_powerplants
 from scripts.definitions.heat_system import HeatSystem
 from scripts.prepare_sector_network import (
     cluster_heat_buses,
@@ -35,8 +36,6 @@
 idx = pd.IndexSlice
 spatial = SimpleNamespace()
 
-from scripts.build_powerplants import add_custom_powerplants
-
 
 def add_build_year_to_new_assets(n: pypsa.Network, baseyear: int) -> None:
     """
@@ -496,8 +495,8 @@ def add_chp_plants(n, grouping_years, costs, baseyear):
 
     # drop assets which are already phased out / decommissioned
     # drop hydro, waste and oil fueltypes for CHP
-    limit = np.max(grouping_years)
-    drop_fueltypes = ["Hydro", "Other", "Waste", "nicht biogener Abfall"]
+    limit = np.max(grouping_years)  # noqa
+    drop_fueltypes = ["Hydro", "Other", "Waste", "nicht biogener Abfall"]  # noqa
+    chp = ppl.query(
     chp = ppl.query(
         "Set == 'CHP' and (DateOut >= @baseyear or DateOut != DateOut) and (DateIn <= @limit or DateIn != DateIn) and Fueltype not in @drop_fueltypes"
     ).copy()
@@ -1052,7 +1051,6 @@ def add_heating_capacities_installed_before_baseyear(
     )
 
     # delete links with capacities below threshold
-    threshold = snakemake.params.existing_capacities["threshold_capacity"]
     n.remove(
         "Link",
         [
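Two things are going on here: the add_custom_powerplants import moves from mid-file into the import block at the top (ruff's E402 flags module-level imports that do not sit at the top of the file), and the two seemingly unused assignments get "# noqa" because they are referenced only via "@limit" and "@drop_fueltypes" inside the .query() string, which static analysis does not count as a use. A stand-alone sketch of that pandas behaviour, with made-up data:

import pandas as pd


def chp_plants(ppl: pd.DataFrame) -> pd.DataFrame:
    # Referenced only via "@drop_fueltypes" inside the query string, so a
    # linter sees an unused local (ruff/flake8 F841) unless marked with noqa.
    drop_fueltypes = ["Hydro"]  # noqa
    return ppl.query("Fueltype not in @drop_fueltypes")


ppl = pd.DataFrame({"Fueltype": ["Hard Coal", "Hydro", "Natural Gas"]})
print(chp_plants(ppl))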

scripts/build_hourly_heat_demand.py

Lines changed: 1 addition & 0 deletions
@@ -19,6 +19,7 @@
     configure_logging,
     generate_periodic_profiles,
     get_snapshots,
+    mock_snakemake,
     set_scenario_config,
 )
 
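For context, mock_snakemake is the helper these scripts use to build a stand-in snakemake object when run outside of Snakemake (e.g. for debugging); adding it to the import block presumably makes the call in the script's main guard resolve. A sketch of the usual pattern, assuming the import added above; the wildcard value is an illustrative assumption, not taken from this commit:

if __name__ == "__main__":
    if "snakemake" not in globals():
        # Wildcards must match the Snakemake rule's definition.
        snakemake = mock_snakemake("build_hourly_heat_demand", clusters="5")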

scripts/make_summary.py

Lines changed: 1 addition & 3 deletions
@@ -11,14 +11,12 @@
 
 import numpy as np
 import pandas as pd
-
-pd.set_option("future.no_silent_downcasting", True)
-
 import pypsa
 
 from scripts._helpers import configure_logging, get_snapshots, set_scenario_config
 from scripts.prepare_sector_network import prepare_costs
 
+pd.set_option("future.no_silent_downcasting", True)
 idx = pd.IndexSlice
 logger = logging.getLogger(__name__)
 opt_name = {"Store": "e", "Line": "s", "Transformer": "s"}
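Moving the pd.set_option call below the import block resolves E402 ("module level import not at top of file") for the imports that previously followed it; the option itself opts into pandas' future behaviour of not silently downcasting dtypes in operations such as fillna/replace. A stand-alone illustration of that option, assuming pandas >= 2.2 where the flag exists:

import pandas as pd

pd.set_option("future.no_silent_downcasting", True)

s = pd.Series([True, False, None], dtype="object")
filled = s.fillna(False)

# With the option enabled the object dtype is kept rather than being silently
# downcast to bool (which older pandas would do, with a FutureWarning).
print(filled.dtype)  # object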

scripts/make_summary_perfect.py

Lines changed: 0 additions & 2 deletions
@@ -10,15 +10,13 @@
 import numpy as np
 import pandas as pd
 import pypsa
-from _helpers import set_scenario_config
 from make_summary import (
     assign_carriers,
     assign_locations,
     calculate_cfs,  # noqa: F401
     calculate_nodal_cfs,  # noqa: F401
     calculate_nodal_costs,  # noqa: F401
 )
-from prepare_sector_network import prepare_costs
 from pypsa.descriptors import get_active_assets
 from six import iteritems
 
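Both removed imports were genuinely unused (flake8/ruff F401), while the imports kept with an explicit "# noqa: F401" are retained on purpose, presumably because they are looked up dynamically rather than called by name in this module. A minimal stdlib-only illustration of that distinction:

import json  # used below, so no linter finding
import os  # noqa: F401  (unused here, but the marker tells flake8/ruff to keep it)

print(json.dumps({"kept": True}))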

scripts/plot_statistics_comparison.py

Lines changed: 0 additions & 1 deletion
@@ -77,7 +77,6 @@ def read_csv(input, output):
             keys=network_labels,
         )
         # get plot label and drop from index
-        label = df.columns.get_level_values(1).unique()[0]
         df.columns = df.columns.droplevel(1)
     except Exception as e:
         print(f"Error reading csv file for {output}: {e}")

scripts/prepare_perfect_foresight.py

Lines changed: 1 addition & 35 deletions
@@ -6,7 +6,6 @@
 """
 
 import logging
-from typing import List
 
 import numpy as np
 import pandas as pd
@@ -563,40 +562,7 @@ def apply_time_segmentation_perfect(
     n.snapshot_weightings = n.snapshot_weightings.mul(sn_weightings, axis=0)
 
 
-def update_heat_pump_efficiency(n: pypsa.Network, years: list[int]) -> None:
-    """
-    Update the efficiency of heat pumps from previous years to current year
-    (e.g. 2030 heat pumps receive 2040 heat pump COPs in 2030).
-
-    Note: this also updates the efficiency of heat pumps in preceding years for previous years, which should have no effect (e.g. 2040 heat pumps receive 2030 COPs in 2030).
-
-    Parameters
-    ----------
-    n : pypsa.Network
-        The concatenated network.
-    years : list[int]
-        List of planning horizon years.
-
-    Returns
-    -------
-    None
-        This function updates the efficiency in place and does not return a value.
-    """
-
-    # get names of all heat pumps
-    heat_pump_idx = n.links.index[n.links.index.str.contains("heat pump")]
-    for year in years:
-        # for each heat pump type, correct efficiency is the efficiency of that technology built in <year>
-        correct_efficiency = n.links_t["efficiency"].loc[
-            (year, slice(None)), heat_pump_idx.str[:-4] + str(year)
-        ]
-        # in <year>, set the efficiency of all heat pumps to the correct efficiency
-        n.links_t["efficiency"].loc[(year, slice(None)), heat_pump_idx] = (
-            correct_efficiency.values
-        )
-
-
-def update_heat_pump_efficiency(n: pypsa.Network, years: List[int]):
+def update_heat_pump_efficiency(n: pypsa.Network, years: list[int]):
     """
     Update the efficiency of heat pumps from previous years to current year
     (e.g. 2030 heat pumps receive 2040 heat pump COPs in 2030).
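Since the later of two same-named definitions is the one Python keeps, dropping the first copy of update_heat_pump_efficiency does not change behaviour; the surviving signature also switches from typing.List[int] to the built-in generic list[int] (PEP 585, available from Python 3.9), which is what makes the "from typing import List" import removable in the first hunk. A stand-alone illustration with a made-up function name:

# No "from typing import List" needed on Python >= 3.9:
def planning_horizons(years: list[int]) -> list[str]:
    return [str(y) for y in years]


print(planning_horizons([2030, 2040, 2050]))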
