Skip to content

Commit 7345c7a

Browse files
committed
Merge branch 'develop' into 'master'.
v2.3.7. See merge request iek-3/shared-code/fine!347.
2 parents 808df71 + 2063c70 commit 7345c7a

File tree

9 files changed

+208
-70
lines changed

9 files changed

+208
-70
lines changed

.gitlab-ci.yml

Lines changed: 26 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ image: mambaorg/micromamba
22

33
stages:
44
- test
5-
- build
5+
- deploy
66

77
variables:
88
DOCKER_HOST: tcp://docker:2375
@@ -38,6 +38,8 @@ variables:
3838
when: never
3939
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
4040
when: never
41+
- if: $CI_COMMIT_TAG
42+
when: never
4143
retry: 1
4244

4345

@@ -65,6 +67,8 @@ variables:
6567
- requirements.yml
6668
- requirements_dev.yml
6769
when: never
70+
- if: $CI_COMMIT_TAG
71+
when: never
6872
- when: on_success
6973

7074

@@ -100,14 +104,16 @@ variables:
100104
when: never
101105
- if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
102106
when: never
107+
- if: $CI_COMMIT_TAG
108+
when: never
103109
- changes:
104110
- pyproject.toml
105111
- requirements.yml
106112
- requirements_dev.yml
107113
when: on_success
108114

109115
.build_template:
110-
stage: build
116+
stage: deploy
111117
image: docker@sha256:c8bb6fa5388b56304dd770c4bc0478de81ce18540173b1a589178c0d31bfce90
112118
services:
113119
- docker:dind@sha256:c8bb6fa5388b56304dd770c4bc0478de81ce18540173b1a589178c0d31bfce90
@@ -121,6 +127,8 @@ test-pypi:
121127
- python -m pip install .[develop]
122128
- python -m pytest -n auto test/
123129
rules:
130+
- if: $CI_COMMIT_TAG
131+
when: never
124132
- if: '$CI_COMMIT_BRANCH == "master"'
125133
- if: '$CI_COMMIT_BRANCH == "develop"'
126134
- if: '$CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "master"'
@@ -203,27 +211,12 @@ test-codestyle:
203211

204212
# Deployment
205213

206-
build-master-latest:
207-
extends: .build_template
208-
script:
209-
# Login to the DockerHub repo using a specialized access token.
210-
# Then, build the docker image with the tested code and tag it
211-
# with the current version, as well as latest.
212-
# Afterwards, push to DockerHub.
213-
- docker login -u fzjiek3 -p $DOCKER_AT
214-
- docker build -t "fzjiek3/fine:latest" .
215-
- docker push fzjiek3/fine:latest
216-
rules:
217-
- if: $CI_PIPELINE_SOURCE == "schedule"
218-
when: never
219-
- if: $CI_COMMIT_BRANCH == "master"
220-
221214
build-tag:
222215
extends: .build_template
223216
script:
224217
- docker login -u fzjiek3 -p $DOCKER_AT
225-
- docker build -t fzjiek3/fine:${CI_COMMIT_TAG} .
226-
- docker push fzjiek3/fine:${CI_COMMIT_TAG}
218+
- docker build -t fzjiek3/fine:${CI_COMMIT_TAG} -t fzjiek3/fine:latest .
219+
- docker push fzjiek3/fine --all-tags
227220
rules:
228221
- if: $CI_PIPELINE_SOURCE == "schedule"
229222
when: never
@@ -242,14 +235,21 @@ build-dev:
242235
- if: $CI_COMMIT_BRANCH == "develop"
243236

244237
pypi-upload:
245-
stage: build
238+
stage: deploy
239+
image: python:3.12
240+
before_script:
241+
- python3 -m pip install --upgrade build
242+
- python3 -m pip install --upgrade twine
243+
variables:
244+
TWINE_USERNAME: $PYPI_USERNAME
245+
TWINE_PASSWORD: $PYPI_PASSWORD
246246
script:
247-
- micromamba install -c conda-forge -n base -y python=3.10
248-
- python -m pip install --upgrade build
249-
- python -m pip install --upgrade twine
250-
- python -m build
251-
- python -m twine upload -u __token__ -p $PYPI_TOKEN dist/*
247+
# Test if the version defined in `pyproject.toml` is the same as the tag
248+
- PYPROJECT_VERSION=$(grep -m 1 version pyproject.toml | tr -s ' ' | tr -d '"' | tr -d "'" | cut -d' ' -f3)
249+
- test PYPROJECT_VERSION = v${CI_COMMIT_TAG}
250+
# Build and push to pypi
251+
- python3 -m build
252+
- python3 -m twine upload dist/*
252253
rules:
253254
- if: $CI_COMMIT_BRANCH == "master" && $CI_COMMIT_TAG
254-
when: manual
255255

docs/source/conf.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@
8080
# The short X.Y version.
8181
version = "2.3"
8282
# The full version, including alpha/beta/rc tags.
83-
release = "2.3.6"
83+
release = "2.3.7"
8484

8585
# The language for content autogenerated by Sphinx. Refer to documentation
8686
# for a list of supported languages.

fine/IOManagement/utilsIO.py

Lines changed: 130 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -223,7 +223,7 @@ def generateIterationDicts(component_dict, investmentPeriods):
223223
return df_iteration_dict, series_iteration_dict, constants_iteration_dict
224224

225225

226-
def addDFVariablesToXarray(xr_ds, component_dict, df_iteration_dict):
226+
def addDFVariablesToXarray(xr_ds, component_dict, df_iteration_dict, locations):
227227
"""Adds all variables whose data is contained in a pd.DataFrame to xarray dataset.
228228
These variables are normally regional time series (dimensions - space, time)
229229
@@ -238,10 +238,33 @@ def addDFVariablesToXarray(xr_ds, component_dict, df_iteration_dict):
238238
values - list of tuple of component class and component name
239239
:type df_iteration_dict: dict
240240
241+
:param locations: esM locations
242+
:type locations: list
243+
241244
:return: xr_ds
242245
"""
246+
# Treat transmission data separately
247+
df_iteration_dict_orig = df_iteration_dict.copy()
248+
df_iteration_dict_transm = {}
249+
df_iteration_dict = {}
250+
for variable_description, description_tuple_list in df_iteration_dict_orig.items():
251+
for description_tuple in description_tuple_list:
252+
# check if data is transmission and time dependent
253+
if "Transmission" in description_tuple[0]:
254+
# add "2dim" to variable_description
255+
if variable_description not in df_iteration_dict_transm.keys():
256+
df_iteration_dict_transm[variable_description] = []
257+
df_iteration_dict_transm[variable_description].append(description_tuple)
243258

244-
for variable_description, description_tuple_list in df_iteration_dict.items():
259+
else:
260+
if variable_description not in df_iteration_dict.keys():
261+
df_iteration_dict[variable_description] = []
262+
df_iteration_dict[variable_description].append(description_tuple)
263+
264+
for (
265+
variable_description,
266+
description_tuple_list,
267+
) in df_iteration_dict_transm.items():
245268
df_dict = {}
246269

247270
for description_tuple in description_tuple_list:
@@ -260,22 +283,48 @@ def addDFVariablesToXarray(xr_ds, component_dict, df_iteration_dict):
260283
data = component_dict[classname][component][variable_description]
261284

262285
multi_index_dataframe = data.stack()
263-
if "Period" in multi_index_dataframe.index.names:
264-
multi_index_dataframe.index.set_names("time", level=1, inplace=True)
265-
multi_index_dataframe.index.set_names("space", level=2, inplace=True)
286+
if set(locations) == set(
287+
component_dict[classname][component][
288+
variable_description
289+
].index.to_list()
290+
):
291+
multi_index_dataframe.index.set_names("space", level=0, inplace=True)
292+
multi_index_dataframe.index.set_names("space_2", level=1, inplace=True)
266293
else:
294+
# split X_X into multiindex
267295
multi_index_dataframe.index.set_names("time", level=0, inplace=True)
268296
multi_index_dataframe.index.set_names("space", level=1, inplace=True)
297+
# use regex to split via location names
298+
import re
299+
300+
pattern = re.compile("(" + "|".join(locations) + ")")
301+
space_index = multi_index_dataframe.index.get_level_values(
302+
"space"
303+
).str.findall(pattern)
304+
time_index = multi_index_dataframe.index.get_level_values("time")
305+
# reconstruct multiindex
306+
multi_index_dataframe.index = pd.MultiIndex.from_tuples(
307+
[
308+
(time_index[i], space_index[i][0], space_index[i][1])
309+
for i in range(len(space_index))
310+
],
311+
names=["time", "space", "space_2"],
312+
)
269313

270314
df_dict[df_description] = multi_index_dataframe
271315

272316
df_variable = pd.concat(df_dict)
273317
df_variable.index.set_names("component", level=0, inplace=True)
274318

275319
ds_component = xr.Dataset()
276-
ds_component[f"ts_{variable_description}"] = (
277-
df_variable.sort_index().to_xarray()
278-
)
320+
if "time" in df_variable.index.names:
321+
ds_component[f"ts_{variable_description}"] = (
322+
df_variable.sort_index().to_xarray()
323+
)
324+
else:
325+
ds_component[f"2d_{variable_description}"] = (
326+
df_variable.sort_index().to_xarray()
327+
)
279328

280329
for comp in df_variable.index.get_level_values(0).unique():
281330
this_class = comp.split("; ")[0]
@@ -294,6 +343,61 @@ def addDFVariablesToXarray(xr_ds, component_dict, df_iteration_dict):
294343
except Exception:
295344
pass
296345

346+
for variable_description, description_tuple_list in df_iteration_dict.items():
347+
df_dict = {}
348+
349+
for description_tuple in description_tuple_list:
350+
classname, component = description_tuple
351+
352+
df_description = f"{classname}; {component}"
353+
354+
# If a . is present in variable name, then the data would be
355+
# another level further in the component_dict
356+
if "." in variable_description:
357+
[var_name, subvar_name] = variable_description.split(".")
358+
if subvar_name.isdigit():
359+
subvar_name = int(subvar_name)
360+
data = component_dict[classname][component][var_name][subvar_name]
361+
else:
362+
data = component_dict[classname][component][variable_description]
363+
364+
multi_index_dataframe = data.stack()
365+
if "Period" in multi_index_dataframe.index.names:
366+
multi_index_dataframe.index.set_names("time", level=1, inplace=True)
367+
multi_index_dataframe.index.set_names("space", level=2, inplace=True)
368+
else:
369+
multi_index_dataframe.index.set_names("time", level=0, inplace=True)
370+
multi_index_dataframe.index.set_names("space", level=1, inplace=True)
371+
372+
df_dict[df_description] = multi_index_dataframe
373+
374+
# check if there is data
375+
if len(df_dict) > 0:
376+
df_variable = pd.concat(df_dict)
377+
df_variable.index.set_names("component", level=0, inplace=True)
378+
379+
ds_component = xr.Dataset()
380+
ds_component[f"ts_{variable_description}"] = (
381+
df_variable.sort_index().to_xarray()
382+
)
383+
384+
for comp in df_variable.index.get_level_values(0).unique():
385+
this_class = comp.split("; ")[0]
386+
this_comp = comp.split("; ")[1]
387+
388+
this_ds_component = (
389+
ds_component.sel(component=comp)
390+
.squeeze()
391+
.reset_coords(names=["component"], drop=True)
392+
)
393+
394+
try:
395+
xr_ds[this_class][this_comp] = xr.merge(
396+
[xr_ds[this_class][this_comp], this_ds_component]
397+
)
398+
except Exception:
399+
pass
400+
297401
return xr_ds
298402

299403

@@ -624,10 +728,27 @@ def addTimeSeriesVariableToDict(
624728
df = comp_var_xr.to_series()
625729
elif drop_component:
626730
df = comp_var_xr.drop("component").to_dataframe().unstack(level=1)
731+
elif "space_2" in comp_var_xr.dims:
732+
df = comp_var_xr.to_dataframe().squeeze()
733+
# merge space and space_2 levels
734+
space_index = df.index.get_level_values("space")
735+
space_2_index = df.index.get_level_values("space_2")
736+
new_space_index = [
737+
f"{space_index[i]}_{space_2_index[i]}" for i in range(len(space_index))
738+
]
739+
df.index = pd.MultiIndex.from_tuples(
740+
[
741+
(df.index.get_level_values("time")[i], new_space_index[i])
742+
for i in range(len(new_space_index))
743+
],
744+
names=["time", "space"],
745+
)
746+
df = df.unstack()
747+
df = df.dropna(axis=1, how="all")
627748
else:
628749
df = comp_var_xr.to_dataframe().unstack(level=1)
629750

630-
if isinstance(df, pd.DataFrame):
751+
if isinstance(df, pd.DataFrame) and "space_2" not in comp_var_xr.dims:
631752
if len(df.columns) > 1:
632753
df.columns = df.columns.droplevel(0)
633754

fine/IOManagement/xarrayIO.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ def convertOptimizationInputToDatasets(esM, useProcessedValues=False):
4646
}
4747

4848
# STEP 4. Add all df variables to xr_ds
49-
xr_dss = utilsIO.addDFVariablesToXarray(xr_dss, component_dict, df_iteration_dict)
49+
xr_dss = utilsIO.addDFVariablesToXarray(xr_dss, component_dict, df_iteration_dict, list(esM.locations))
5050

5151
# STEP 5. Add all series variables to xr_ds
5252
locations = sorted(esm_dict["locations"])

fine/energySystemModel.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
import os
33
import time
44
import warnings
5+
import importlib.util
56

67
import gurobi_logtools as glt
78
import pandas as pd
@@ -2031,7 +2032,11 @@ def optimize(
20312032
################################################################################################################
20322033

20332034
# Set which solver should solve the specified optimization problem
2034-
optimizer = opt.SolverFactory(solver)
2035+
if solver == "gurobi" and importlib.util.find_spec('gurobipy'):
2036+
# Use the direct gurobi solver that uses the Python API.
2037+
optimizer = opt.SolverFactory(solver, solver_io="python")
2038+
else:
2039+
optimizer = opt.SolverFactory(solver)
20352040

20362041
# Set, if specified, the time limit
20372042
if self.solverSpecs["timeLimit"] is not None and solver == "gurobi":

fine/transmission.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -409,15 +409,15 @@ def __init__(
409409
# operationRateMax
410410
self.operationRateMax = operationRateMax
411411
self.fullOperationRateMax = utils.checkAndSetInvestmentPeriodTimeSeries(
412-
esM, name, operationRateMax, self.locationalEligibility
412+
esM, name, operationRateMax, self.locationalEligibility, "2dim"
413413
)
414414
self.aggregatedOperationRateMax = dict.fromkeys(esM.investmentPeriods)
415415
self.processedOperationRateMax = dict.fromkeys(esM.investmentPeriods)
416-
416+
417417
# operationRateFix
418418
self.operationRateFix = operationRateFix
419419
self.fullOperationRateFix = utils.checkAndSetInvestmentPeriodTimeSeries(
420-
esM, name, operationRateFix, self.locationalEligibility
420+
esM, name, operationRateFix, self.locationalEligibility, "2dim"
421421
)
422422
self.aggregatedOperationRateFix = dict.fromkeys(esM.investmentPeriods)
423423
self.processedOperationRateFix = dict.fromkeys(esM.investmentPeriods)

0 commit comments

Comments (0)