|

import numpy as np
from power_grid_model import initialize_array
-from power_grid_model._utils import is_nan
from power_grid_model.data_types import ComponentList, Dataset, SingleDataset, SinglePythonDataset
from power_grid_model.utils import json_deserialize, json_serialize

|
|
from power_grid_model_io.data_types import ExtraInfo, StructuredData
from power_grid_model_io.utils.dict import merge_dicts

+_NAN_FUNC = {
+    np.dtype("f8"): lambda x: np.all(np.isnan(x)),
+    np.dtype("i4"): lambda x: np.all(x == np.iinfo(np.dtype("i4")).min),
+    np.dtype("i1"): lambda x: np.all(x == np.iinfo(np.dtype("i1")).min),
+}
+

class PgmJsonConverter(BaseConverter[StructuredData]):
    """
@@ -216,8 +221,7 @@ def _is_batch(data: Dataset) -> bool:
        is_batch = is_dense_batch or is_sparse_batch
        return bool(is_batch)

-    @staticmethod
-    def _serialize_dataset(data: SingleDataset, extra_info: Optional[ExtraInfo] = None) -> SinglePythonDataset:
+    def _serialize_dataset(self, data: SingleDataset, extra_info: Optional[ExtraInfo] = None) -> SinglePythonDataset:
        """This function converts a single power-grid-model dataset to a structured dataset

        Args:
@@ -248,7 +252,7 @@ def _serialize_dataset(data: SingleDataset, extra_info: Optional[ExtraInfo] = None) -> SinglePythonDataset:
                    {
                        attribute: obj[attribute].tolist()
                        for attribute in objects.dtype.names
-                        if not is_nan(obj[attribute])
+                        if not self._is_nan(obj[attribute])
                    },
                    extra_info.get(obj["id"], {}),
                )
@@ -296,3 +300,16 @@ def _get_first_by(data: List[Dict[str, Any]], field: str, value: Any) -> Optiona
                return entry

        return None
+
+    @staticmethod
+    def _is_nan(data: np.ndarray) -> bool:
+        """
+        Determine whether the data point is invalid (NaN)
+        Args:
+            data: a single scalar or numpy array
+
+        Returns:
+            True when all the data points are invalid
+            False otherwise
+        """
+        return bool(_NAN_FUNC[data.dtype](data))
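For context, here is a minimal sketch (not part of the commit) of the sentinel convention that the new `_NAN_FUNC` mapping relies on: power-grid-model marks a missing float attribute as NaN and a missing integer attribute as the minimum representable value of its dtype, so `_is_nan` reports an attribute as invalid only when every element carries that sentinel. The attribute names below are purely illustrative.

```python
import numpy as np

# Integer sentinel for dtype "i4", as in the _NAN_FUNC table above.
nan_i4 = np.iinfo(np.dtype("i4")).min          # -2147483648 marks "not provided"

tap_pos = np.array([nan_i4, nan_i4], dtype="i4")   # all values are the sentinel
p_value = np.array([1.5e6, np.nan], dtype="f8")    # only one value is NaN

print(bool(np.all(tap_pos == nan_i4)))  # True  -> attribute would be skipped during serialization
print(bool(np.all(np.isnan(p_value))))  # False -> attribute would be kept
```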