Skip to content

Commit e75454a

Browse files
committed
Merge branch 'main' into fix/nightly-fail-remove-publish
2 parents 1ad6098 + 5f534fd commit e75454a

File tree

6 files changed

+46
-8
lines changed

6 files changed

+46
-8
lines changed

.github/workflows/build-test-release.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -93,11 +93,11 @@ jobs:
9393
9494
- name: Test and Coverage
9595
run: |
96-
pytest --cov-report=xml:coverage.xml --cov-fail-under=0
96+
pytest
9797
9898
# Fix relative paths in coverage file
9999
# Known bug: https://community.sonarsource.com/t/sonar-on-github-actions-with-python-coverage-source-issue/36057
100-
sed -i 's@/home/runner/work/power-grid-model-io/power-grid-model-io@/github/workspace@g' coverage.xml
100+
sed -i 's@/home/runner/work/power-grid-model-io/power-grid-model-io@/github/workspace@g' python_coverage.xml
101101
102102
- name: SonarCloud Scan
103103
if: ${{ (github.event_name == 'push') || (github.event.pull_request.head.repo.owner.login == 'PowerGridModel') }}

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ __pycache__/
1616
.mypy_cache/
1717
.pytest_cache/
1818
cov_html/
19+
python_coverage.xml
1920

2021
# Virtual environment
2122
venv*/

pyproject.toml

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -81,7 +81,14 @@ power_grid_model_io = ["config/**/*.yaml", "py.typed"]
8181

8282
[tool.pytest.ini_options]
8383
testpaths = ["tests/unit"]
84-
addopts = ["--cov=power_grid_model_io", "--cov-report=term", "--cov-report=html:cov_html", "--cov-fail-under=98.5"]
84+
addopts = [
85+
"--cov=power_grid_model_io",
86+
"--cov-report=term",
87+
"--cov-report=html:cov_html",
88+
"--cov-report=xml:python_coverage.xml",
89+
"--cov-fail-under=98.5",
90+
]
91+
xfail_strict = true
8592

8693
[tool.black]
8794
line-length = 120

sonar-project.properties

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,4 +10,4 @@ sonar.sources=src
1010
sonar.tests=tests
1111
sonar.sourceEncoding=UTF-8
1212
sonar.python.version=3
13-
sonar.python.coverage.reportPaths=/github/workspace/coverage.xml
13+
sonar.python.coverage.reportPaths=python_coverage.xml

src/power_grid_model_io/converters/pgm_json_converter.py

Lines changed: 21 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,6 @@
1313

1414
import numpy as np
1515
from power_grid_model import initialize_array
16-
from power_grid_model._utils import is_nan
1716
from power_grid_model.data_types import ComponentList, Dataset, SingleDataset, SinglePythonDataset
1817
from power_grid_model.utils import json_deserialize, json_serialize
1918

@@ -22,6 +21,12 @@
2221
from power_grid_model_io.data_types import ExtraInfo, StructuredData
2322
from power_grid_model_io.utils.dict import merge_dicts
2423

24+
_NAN_FUNC = {
25+
np.dtype("f8"): lambda x: np.all(np.isnan(x)),
26+
np.dtype("i4"): lambda x: np.all(x == np.iinfo(np.dtype("i4")).min),
27+
np.dtype("i1"): lambda x: np.all(x == np.iinfo(np.dtype("i1")).min),
28+
}
29+
2530

2631
class PgmJsonConverter(BaseConverter[StructuredData]):
2732
"""
@@ -216,8 +221,7 @@ def _is_batch(data: Dataset) -> bool:
216221
is_batch = is_dense_batch or is_sparse_batch
217222
return bool(is_batch)
218223

219-
@staticmethod
220-
def _serialize_dataset(data: SingleDataset, extra_info: Optional[ExtraInfo] = None) -> SinglePythonDataset:
224+
def _serialize_dataset(self, data: SingleDataset, extra_info: Optional[ExtraInfo] = None) -> SinglePythonDataset:
221225
"""This function converts a single power-grid-model dataset to a structured dataset
222226
223227
Args:
@@ -248,7 +252,7 @@ def _serialize_dataset(data: SingleDataset, extra_info: Optional[ExtraInfo] = No
248252
{
249253
attribute: obj[attribute].tolist()
250254
for attribute in objects.dtype.names
251-
if not is_nan(obj[attribute])
255+
if not self._is_nan(obj[attribute])
252256
},
253257
extra_info.get(obj["id"], {}),
254258
)
@@ -296,3 +300,16 @@ def _get_first_by(data: List[Dict[str, Any]], field: str, value: Any) -> Optiona
296300
return entry
297301

298302
return None
303+
304+
@staticmethod
305+
def _is_nan(data: np.ndarray) -> bool:
306+
"""
307+
Determine whether the data point is valid
308+
Args:
309+
data: a single scalar or numpy array
310+
311+
Returns:
312+
True when all the data points are invalid
313+
False otherwise
314+
"""
315+
return bool(_NAN_FUNC[data.dtype](data))

tests/unit/converters/test_pgm_json_converter.py

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -155,3 +155,16 @@ def test_serialize_dataset(converter: PgmJsonConverter, pgm_input_data: SingleDa
155155
extra_info: ExtraInfo = {1: {"dummy": "data"}}
156156
structured_data_with_extra_info = converter._serialize_dataset(data=pgm_input_data, extra_info=extra_info)
157157
assert structured_data_with_extra_info == {"node": [{"id": 1, "dummy": "data"}, {"id": 2}]}
158+
159+
160+
def test_is_nan(converter: PgmJsonConverter):
    """_is_nan reports True only when every element holds the NaN sentinel."""
    # f8: NaN is the sentinel value
    assert converter._is_nan(np.array([np.nan]))
    assert converter._is_nan(np.array([np.nan, np.nan, np.nan]))
    # A mix of valid and sentinel values is not considered "missing"
    assert not converter._is_nan(np.array([0.1, 0.2, np.nan], dtype=np.dtype("f8")))
    # i4 / i1: the dtype's minimum acts as the sentinel
    assert not converter._is_nan(np.array([10, 2, -(2**31), 40], dtype=np.dtype("i4")))
    assert not converter._is_nan(np.array([1, 0, -(2**7), 1], dtype=np.dtype("i1")))

0 commit comments

Comments
 (0)