
Commit 44bf341

Merge branch 'main' into feature/main-model-refactor-clean-templates-experiment

Signed-off-by: Nitish Bharambe <[email protected]>
2 parents: 3b1358c + 6c9605a

11 files changed: +95 additions, -149 deletions


power_grid_model_c/power_grid_model/include/power_grid_model/job_adapter.hpp

Lines changed: 10 additions & 10 deletions
```diff
@@ -6,10 +6,11 @@
 
 // Adapter that connects the JobDispatch to the MainModelImpl
 
-#include "auxiliary/dataset.hpp"
 #include "job_interface.hpp"
 #include "main_model_fwd.hpp"
 
+#include "auxiliary/dataset.hpp"
+#include "common/dummy_logging.hpp"
 #include "main_core/update.hpp"
 
 namespace power_grid_model {
@@ -99,7 +100,7 @@ template <class MainModel> class JobAdapter : public JobInterface<JobAdapter<Mai
 
     void calculate_impl(MutableDataset const& result_data, Idx scenario_idx) const {
         MainModel::calculator(options_.get(), model_reference_.get(), result_data.get_individual_scenario(scenario_idx),
-                              false);
+                              false, logger());
     }
 
     void cache_calculate_impl() const {
@@ -112,7 +113,7 @@ template <class MainModel> class JobAdapter : public JobInterface<JobAdapter<Mai
                                       "sym_output",
                                       model_reference_.get().meta_data(),
                                   },
-                                  true);
+                                  true, logger());
         } catch (SparseMatrixError const&) { // NOLINT(bugprone-empty-catch) // NOSONAR
             // missing entries are provided in the update data
         } catch (NotObservableError const&) { // NOLINT(bugprone-empty-catch) // NOSONAR
@@ -157,13 +158,12 @@ template <class MainModel> class JobAdapter : public JobInterface<JobAdapter<Mai
         });
     }
 
-    void reset_logger_impl() {
-        log_ = nullptr;
-        model_reference_.get().reset_logger();
-    }
-    void set_logger_impl(Logger& log) {
-        log_ = &log;
-        model_reference_.get().set_logger(*log_);
+    void reset_logger_impl() { log_ = nullptr; }
+    void set_logger_impl(Logger& log) { log_ = &log; }
+
+    Logger& logger() const {
+        static common::logging::NoLogger no_log{};
+        return log_ != nullptr ? *log_ : no_log;
     }
 };
 } // namespace power_grid_model
```
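The net effect here is that `JobAdapter` no longer pushes the logger into the model through `set_logger`/`reset_logger`; instead, each call passes `logger()`, which falls back to a function-local `NoLogger` when no logger is attached, so callers never need a null check. A rough sketch of that null-object fallback, written in Python purely for illustration and not part of the library:

```python
class NoLogger:
    """Null-object logger: accepts log calls and silently discards them."""

    def log(self, message: str) -> None:  # hypothetical method name
        pass


class JobAdapterSketch:
    """Illustrative stand-in for the C++ JobAdapter logger fallback."""

    _no_logger = NoLogger()  # shared no-op instance, like the static NoLogger

    def __init__(self) -> None:
        self._log = None

    def set_logger(self, log: object) -> None:
        self._log = log

    def reset_logger(self) -> None:
        self._log = None

    def logger(self) -> object:
        # Return the attached logger if present, otherwise the shared no-op one.
        return self._log if self._log is not None else self._no_logger


adapter = JobAdapterSketch()
adapter.logger().log("safe to call even when no logger is set")
```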

power_grid_model_c/power_grid_model/include/power_grid_model/main_model_impl.hpp

Lines changed: 61 additions & 116 deletions
Large diffs are not rendered by default.

pyproject.toml

Lines changed: 0 additions & 1 deletion
```diff
@@ -154,7 +154,6 @@ select = [
 ]
 
 ignore = [
-    "UP038", # deprecated, https://docs.astral.sh/ruff/rules/non-pep604-isinstance/#deprecation
     "PT019" # flags false positives
 ]
```
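For context, `UP038` is Ruff's non-pep604-isinstance rule, which used to rewrite tuple-based `isinstance` checks into PEP 604 unions; with the rule deprecated upstream (see the linked docs), the ignore entry is no longer needed. A minimal illustration of the two spellings the rule distinguished, not code from this repository:

```python
def is_number_tuple_style(value: object) -> bool:
    # Classic spelling: isinstance with a tuple of types.
    return isinstance(value, (int, float))


def is_number_union_style(value: object) -> bool:
    # PEP 604 spelling that UP038 used to suggest (Python 3.10+).
    return isinstance(value, int | float)


assert is_number_tuple_style(3) and is_number_union_style(3.5)
```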

tests/unit/test_buffer_handling.py

Lines changed: 4 additions & 4 deletions
```diff
@@ -111,7 +111,7 @@ def test__get_buffer_properties__batch_requested_for_single_data(component_type,
     schema = power_grid_meta_data[DatasetType.update][component_type]
 
     with pytest.raises(
-        ValueError, match="Incorrect/inconsistent data provided: single data provided but batch data expected."
+        ValueError, match=r"Incorrect\/inconsistent data provided: single data provided but batch data expected"
     ):
         get_buffer_properties(data, schema=schema, is_batch=True, batch_size=BATCH_DATASET_NDIM)
 
@@ -138,7 +138,7 @@ def test__get_buffer_properties__single_requested_for_batch(component_type, is_s
         get_buffer_properties(data, schema=schema, is_batch=False, batch_size=None)
     else:
         with pytest.raises(
-            ValueError, match="Incorrect/inconsistent data provided: batch data provided but single data expected."
+            ValueError, match=r"Incorrect\/inconsistent data provided: batch data provided but single data expected"
         ):
             get_buffer_properties(data, schema=schema, is_batch=False, batch_size=None)
 
@@ -233,7 +233,7 @@ def test__get_raw_attribute_data_view_fail(component, attribute):
     assert old_shape[-1] == asym_dense_batch_last_dim
     assert updated_shape[-1] == unsupported_asym_dense_batch_last_dim
 
-    with pytest.raises(ValueError, match="Given data has a different schema than supported."):
+    with pytest.raises(ValueError, match="Given data has a different schema than supported"):
         get_buffer_view(data, schema=schema, is_batch=True)
 
 
@@ -322,5 +322,5 @@ def test__get_raw_attribute_data_view_directly_fail(component, attr_data_shape,
     arr = np.zeros(attr_data_shape)
     schema = power_grid_meta_data[DatasetType.update][component]
 
-    with pytest.raises(ValueError, match="Given data has a different schema than supported."):
+    with pytest.raises(ValueError, match="Given data has a different schema than supported"):
         _get_raw_attribute_data_view(arr, schema, attribute)
```
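The message tweaks in these tests reflect that `pytest.raises(..., match=...)` applies the pattern with `re.search`, so it is a regular expression rather than a literal string: an unescaped trailing `.` silently matches any character, and special characters are best escaped in a raw string. A small self-contained illustration of that behaviour, assuming only pytest and the standard library:

```python
import re

import pytest


def raise_schema_error() -> None:
    raise ValueError("Given data has a different schema than supported")


def test_match_is_a_regex_search() -> None:
    # "." is a wildcard here, so the pattern also matches an unrelated suffix.
    assert re.search("schema than supported.", "schema than supportedX")
    # Dropping the trailing dot keeps the assertion strict but regex-safe.
    with pytest.raises(ValueError, match="Given data has a different schema than supported"):
        raise_schema_error()
```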

tests/unit/test_data_handling.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -168,5 +168,5 @@ def test_dtype_compatibility_check_compatible():
 def test_dtype_compatibility_check__error():
     nodes = initialize_array(DT.sym_output, CT.node, (1, 2))
     data = {CT.node: nodes.astype(nodes.dtype.newbyteorder("S"))}
-    with pytest.raises(ValueError, match="Data type does not match schema."):
+    with pytest.raises(ValueError, match="Data type does not match schema"):
         CMutableDataset(data, DT.sym_output)
```

tests/unit/test_dataset.py

Lines changed: 3 additions & 3 deletions
```diff
@@ -48,7 +48,7 @@ def test_const_dataset__empty_dataset(dataset_type):
     assert info.elements_per_scenario() == {}
     assert info.total_elements() == {}
 
-    with pytest.raises(ValueError, match="The dataset type could not be deduced."):
+    with pytest.raises(ValueError, match="The dataset type could not be deduced"):
         CConstDataset(data={})
 
 
@@ -173,7 +173,7 @@ def test_const_dataset__mixed_batch_size(dataset_type):
         ComponentType.node: np.zeros(shape=(2, 3), dtype=power_grid_meta_data[dataset_type][ComponentType.node]),
         ComponentType.line: np.zeros(shape=(3, 3), dtype=power_grid_meta_data[dataset_type][ComponentType.line]),
     }
-    with pytest.raises(ValueError, match="Dataset must have a consistent batch size across all components."):
+    with pytest.raises(ValueError, match="Dataset must have a consistent batch size across all components"):
         CConstDataset(data, dataset_type)
 
 
@@ -209,5 +209,5 @@ def test_const_dataset__different_dtype(dataset_type, dtype, supported):
         result = CConstDataset(data, dataset_type)
        assert result.get_info().total_elements() == {ComponentType.node: 3}
     else:
-        with pytest.raises(ValueError, match="Data type does not match schema."):
+        with pytest.raises(ValueError, match="Data type does not match schema"):
             CConstDataset(data, dataset_type)
```

tests/unit/test_internal_utils.py

Lines changed: 7 additions & 7 deletions
```diff
@@ -575,8 +575,8 @@ def test_get_and_verify_batch_sizes_inconsistent_batch_sizes_two_components():
     update_data: BatchDataset = {"foo": np.empty(shape=(3, 3)), "bar": np.empty(shape=(2, 3))}
     with pytest.raises(
         ValueError,
-        match="Inconsistent number of batches in batch data. "
-        "Component 'bar' contains 2 batches, while 'foo' contained 3 batches.",
+        match=r"Inconsistent number of batches in batch data\. "
+        r"Component \'bar\' contains 2 batches, while \'foo\' contained 3 batches\.",
     ):
         get_and_verify_batch_sizes(update_data)
 
@@ -589,16 +589,16 @@ def test_convert_get_and_verify_batch_sizes_inconsistent_batch_sizes_more_than_t
     }
     with pytest.raises(
         ValueError,
-        match="Inconsistent number of batches in batch data. "
-        "Component 'baz' contains 2 batches, while bar/foo contained 3 batches.",
+        match=r"Inconsistent number of batches in batch data\. "
+        r"Component \'baz\' contains 2 batches, while bar\/foo contained 3 batches\.",
     ):
         get_and_verify_batch_sizes(update_data)
 
 
 @patch("power_grid_model._core.utils.get_and_verify_batch_sizes")
 def test_convert_batch_dataset_to_batch_list_missing_key_sparse(_mock: MagicMock):
     update_data: BatchDataset = {"foo": {"a": np.empty(3), "data": np.empty(3)}}  # type: ignore
-    with pytest.raises(KeyError, match="Invalid data for 'foo' component. Missing 'indptr' in sparse batch data. "):
+    with pytest.raises(KeyError, match=r"Invalid data for \'foo\' component\. Missing \'indptr\' in sparse batch data"):
         convert_batch_dataset_to_batch_list(update_data)
 
 
@@ -607,7 +607,7 @@ def test_convert_batch_dataset_to_batch_list_invalid_type_sparse(_mock: MagicMoc
     update_data: BatchDataset = {"foo": "wrong type"}  # type: ignore
     with pytest.raises(
         TypeError,
-        match="Invalid data for 'foo' component. Expecting a 1D/2D Numpy structured array or a dictionary of such.",
+        match=r"Invalid data for \'foo\' component\. Expecting a 1D\/2D Numpy structured array or a dictionary of such",
     ):
         convert_batch_dataset_to_batch_list(update_data)
 
@@ -789,7 +789,7 @@ def test_get_dataset_type(dataset_type):
 
 
 def test_get_dataset_type__empty_data():
-    with pytest.raises(ValueError, match="At least one component should have row based data."):
+    with pytest.raises(ValueError, match="At least one component should have row based data"):
         get_dataset_type(data={})
```
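As an aside, an alternative to hand-escaping quotes, dots, and slashes in these `match` patterns is `re.escape`, which builds an equivalent literal pattern programmatically; the hypothetical test below is only a sketch of that option, not part of this suite:

```python
import re

import pytest

MESSAGE = "Component 'baz' contains 2 batches, while bar/foo contained 3 batches."


def raise_inconsistent_batches() -> None:
    raise ValueError(MESSAGE)


def test_match_with_re_escape() -> None:
    # re.escape escapes regex metacharacters (here the dots), matching the
    # intent of the hand-written raw-string patterns in the diff above.
    with pytest.raises(ValueError, match=re.escape(MESSAGE)):
        raise_inconsistent_batches()
```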

tests/unit/test_serialization.py

Lines changed: 2 additions & 2 deletions
```diff
@@ -691,7 +691,7 @@ def test_json_serialize_empty_dataset(dataset_type, use_compact_list: bool):
     assert isinstance(result, str)
     assert result == reference
 
-    with pytest.raises(ValueError, match="At least one component should have row based data."):
+    with pytest.raises(ValueError, match="At least one component should have row based data"):
         json_serialize({}, use_compact_list=use_compact_list, indent=indent)
 
 
@@ -713,7 +713,7 @@ def test_msgpack_serialize_empty_dataset(dataset_type, use_compact_list):
     reference = empty_dataset(dataset_type)
     assert from_msgpack(msgpack_serialize({}, dataset_type, use_compact_list=use_compact_list)) == reference
 
-    with pytest.raises(ValueError, match="At least one component should have row based data."):
+    with pytest.raises(ValueError, match="At least one component should have row based data"):
         json_serialize({}, use_compact_list=use_compact_list)
```

tests/unit/test_utils.py

Lines changed: 2 additions & 2 deletions
```diff
@@ -108,9 +108,9 @@ def test_get_dataset_batch_size_mixed():
             "indptr": np.array([0, 2, 3]),
         },
     }
-    with pytest.raises(ValueError, match="Inconsistent number of batches in batch data."):
+    with pytest.raises(ValueError, match="Inconsistent number of batches in batch data"):
         get_dataset_batch_size(data_dense)
-    with pytest.raises(ValueError, match="Inconsistent number of batches in batch data."):
+    with pytest.raises(ValueError, match="Inconsistent number of batches in batch data"):
         get_dataset_batch_size(data_sparse)
```

tests/unit/validation/test_rules.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -673,7 +673,7 @@ def _mock_nan_type(component: ComponentType, field: str):
     assert len(errors) == 1
     assert errors == [MultiFieldValidationError("foo_test", ["foo", "bar", "baz"], [1])]
 
-    with pytest.raises(ValueError, match="The fields parameter must contain at least 2 fields.") as excinfo:
+    with pytest.raises(ValueError, match="The fields parameter must contain at least 2 fields") as excinfo:
         not_all_missing(invalid, ["bar"], "foo_test")
 
     assert excinfo.type is ValueError
```
