
Commit 7615d7e

Merge pull request #1121 from PowerGridModel/fix-nightly
Fix nightly: RUF043
2 parents 3f20602 + 15f0fe9
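For context: Ruff's RUF043 (pytest-raises-ambiguous-pattern) flags non-raw string literals passed to the `match` parameter of `pytest.raises()` when they contain unescaped regex metacharacters, because `match` is interpreted as a regular expression and, for example, a trailing `.` silently matches any character. The changes below resolve the warning either by dropping the ambiguous metacharacter or by switching to an explicitly escaped raw-string pattern. The snippet below is only an illustrative sketch of that before/after shape; it reuses an error message from this diff together with a hypothetical helper function, and is not code from the repository.

```python
import pytest


def _raise_schema_error() -> None:
    # Hypothetical helper used only to trigger the exception in this sketch.
    raise ValueError("Data type does not match schema.")


def test_match_pattern_flagged_by_ruf043() -> None:
    # RUF043: the non-raw pattern contains a regex metacharacter (the trailing "."),
    # so it is ambiguous whether a literal period or "any character" is intended.
    with pytest.raises(ValueError, match="Data type does not match schema."):
        _raise_schema_error()


def test_match_pattern_after_fix() -> None:
    # Fixed as in this commit: drop the ambiguous trailing "." (an escaped raw
    # string such as r"Data type does not match schema\." would also satisfy RUF043).
    with pytest.raises(ValueError, match="Data type does not match schema"):
        _raise_schema_error()
```

Since `match` performs a regex search against the exception message, removing the trailing period still matches the emitted message while making the pattern unambiguous.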

File tree

9 files changed: +24 additions, -23 deletions


pyproject.toml

Lines changed: 0 additions & 1 deletion
@@ -154,7 +154,6 @@ select = [
 ]

 ignore = [
-    "UP038", # deprecated, https://docs.astral.sh/ruff/rules/non-pep604-isinstance/#deprecation
     "PT019" # flags false positives
 ]

tests/unit/test_buffer_handling.py

Lines changed: 4 additions & 4 deletions
@@ -111,7 +111,7 @@ def test__get_buffer_properties__batch_requested_for_single_data(component_type,
     schema = power_grid_meta_data[DatasetType.update][component_type]

     with pytest.raises(
-        ValueError, match="Incorrect/inconsistent data provided: single data provided but batch data expected."
+        ValueError, match=r"Incorrect\/inconsistent data provided: single data provided but batch data expected"
     ):
         get_buffer_properties(data, schema=schema, is_batch=True, batch_size=BATCH_DATASET_NDIM)

@@ -138,7 +138,7 @@ def test__get_buffer_properties__single_requested_for_batch(component_type, is_s
         get_buffer_properties(data, schema=schema, is_batch=False, batch_size=None)
     else:
         with pytest.raises(
-            ValueError, match="Incorrect/inconsistent data provided: batch data provided but single data expected."
+            ValueError, match=r"Incorrect\/inconsistent data provided: batch data provided but single data expected"
         ):
             get_buffer_properties(data, schema=schema, is_batch=False, batch_size=None)

@@ -233,7 +233,7 @@ def test__get_raw_attribute_data_view_fail(component, attribute):
     assert old_shape[-1] == asym_dense_batch_last_dim
     assert updated_shape[-1] == unsupported_asym_dense_batch_last_dim

-    with pytest.raises(ValueError, match="Given data has a different schema than supported."):
+    with pytest.raises(ValueError, match="Given data has a different schema than supported"):
         get_buffer_view(data, schema=schema, is_batch=True)

@@ -322,5 +322,5 @@ def test__get_raw_attribute_data_view_directly_fail(component, attr_data_shape,
     arr = np.zeros(attr_data_shape)
     schema = power_grid_meta_data[DatasetType.update][component]

-    with pytest.raises(ValueError, match="Given data has a different schema than supported."):
+    with pytest.raises(ValueError, match="Given data has a different schema than supported"):
         _get_raw_attribute_data_view(arr, schema, attribute)

tests/unit/test_data_handling.py

Lines changed: 1 addition & 1 deletion
@@ -168,5 +168,5 @@ def test_dtype_compatibility_check_compatible():
 def test_dtype_compatibility_check__error():
     nodes = initialize_array(DT.sym_output, CT.node, (1, 2))
     data = {CT.node: nodes.astype(nodes.dtype.newbyteorder("S"))}
-    with pytest.raises(ValueError, match="Data type does not match schema."):
+    with pytest.raises(ValueError, match="Data type does not match schema"):
         CMutableDataset(data, DT.sym_output)

tests/unit/test_dataset.py

Lines changed: 3 additions & 3 deletions
@@ -48,7 +48,7 @@ def test_const_dataset__empty_dataset(dataset_type):
     assert info.elements_per_scenario() == {}
     assert info.total_elements() == {}

-    with pytest.raises(ValueError, match="The dataset type could not be deduced."):
+    with pytest.raises(ValueError, match="The dataset type could not be deduced"):
         CConstDataset(data={})

@@ -173,7 +173,7 @@ def test_const_dataset__mixed_batch_size(dataset_type):
         ComponentType.node: np.zeros(shape=(2, 3), dtype=power_grid_meta_data[dataset_type][ComponentType.node]),
         ComponentType.line: np.zeros(shape=(3, 3), dtype=power_grid_meta_data[dataset_type][ComponentType.line]),
     }
-    with pytest.raises(ValueError, match="Dataset must have a consistent batch size across all components."):
+    with pytest.raises(ValueError, match="Dataset must have a consistent batch size across all components"):
         CConstDataset(data, dataset_type)

@@ -209,5 +209,5 @@ def test_const_dataset__different_dtype(dataset_type, dtype, supported):
         result = CConstDataset(data, dataset_type)
         assert result.get_info().total_elements() == {ComponentType.node: 3}
     else:
-        with pytest.raises(ValueError, match="Data type does not match schema."):
+        with pytest.raises(ValueError, match="Data type does not match schema"):
             CConstDataset(data, dataset_type)

tests/unit/test_internal_utils.py

Lines changed: 7 additions & 7 deletions
@@ -575,8 +575,8 @@ def test_get_and_verify_batch_sizes_inconsistent_batch_sizes_two_components():
     update_data: BatchDataset = {"foo": np.empty(shape=(3, 3)), "bar": np.empty(shape=(2, 3))}
     with pytest.raises(
         ValueError,
-        match="Inconsistent number of batches in batch data. "
-        "Component 'bar' contains 2 batches, while 'foo' contained 3 batches.",
+        match=r"Inconsistent number of batches in batch data\. "
+        r"Component \'bar\' contains 2 batches, while \'foo\' contained 3 batches\.",
     ):
         get_and_verify_batch_sizes(update_data)

@@ -589,16 +589,16 @@ def test_convert_get_and_verify_batch_sizes_inconsistent_batch_sizes_more_than_t
     }
     with pytest.raises(
         ValueError,
-        match="Inconsistent number of batches in batch data. "
-        "Component 'baz' contains 2 batches, while bar/foo contained 3 batches.",
+        match=r"Inconsistent number of batches in batch data\. "
+        r"Component \'baz\' contains 2 batches, while bar\/foo contained 3 batches\.",
     ):
         get_and_verify_batch_sizes(update_data)


 @patch("power_grid_model._core.utils.get_and_verify_batch_sizes")
 def test_convert_batch_dataset_to_batch_list_missing_key_sparse(_mock: MagicMock):
     update_data: BatchDataset = {"foo": {"a": np.empty(3), "data": np.empty(3)}}  # type: ignore
-    with pytest.raises(KeyError, match="Invalid data for 'foo' component. Missing 'indptr' in sparse batch data. "):
+    with pytest.raises(KeyError, match=r"Invalid data for \'foo\' component\. Missing \'indptr\' in sparse batch data"):
         convert_batch_dataset_to_batch_list(update_data)

@@ -607,7 +607,7 @@ def test_convert_batch_dataset_to_batch_list_invalid_type_sparse(_mock: MagicMoc
     update_data: BatchDataset = {"foo": "wrong type"}  # type: ignore
     with pytest.raises(
         TypeError,
-        match="Invalid data for 'foo' component. Expecting a 1D/2D Numpy structured array or a dictionary of such.",
+        match=r"Invalid data for \'foo\' component\. Expecting a 1D\/2D Numpy structured array or a dictionary of such",
     ):
         convert_batch_dataset_to_batch_list(update_data)

@@ -789,7 +789,7 @@ def test_get_dataset_type(dataset_type):


 def test_get_dataset_type__empty_data():
-    with pytest.raises(ValueError, match="At least one component should have row based data."):
+    with pytest.raises(ValueError, match="At least one component should have row based data"):
         get_dataset_type(data={})

tests/unit/test_serialization.py

Lines changed: 2 additions & 2 deletions
@@ -691,7 +691,7 @@ def test_json_serialize_empty_dataset(dataset_type, use_compact_list: bool):
     assert isinstance(result, str)
     assert result == reference

-    with pytest.raises(ValueError, match="At least one component should have row based data."):
+    with pytest.raises(ValueError, match="At least one component should have row based data"):
         json_serialize({}, use_compact_list=use_compact_list, indent=indent)

@@ -713,7 +713,7 @@ def test_msgpack_serialize_empty_dataset(dataset_type, use_compact_list):
     reference = empty_dataset(dataset_type)
     assert from_msgpack(msgpack_serialize({}, dataset_type, use_compact_list=use_compact_list)) == reference

-    with pytest.raises(ValueError, match="At least one component should have row based data."):
+    with pytest.raises(ValueError, match="At least one component should have row based data"):
         json_serialize({}, use_compact_list=use_compact_list)

tests/unit/test_utils.py

Lines changed: 2 additions & 2 deletions
@@ -108,9 +108,9 @@ def test_get_dataset_batch_size_mixed():
             "indptr": np.array([0, 2, 3]),
         },
     }
-    with pytest.raises(ValueError, match="Inconsistent number of batches in batch data."):
+    with pytest.raises(ValueError, match="Inconsistent number of batches in batch data"):
         get_dataset_batch_size(data_dense)
-    with pytest.raises(ValueError, match="Inconsistent number of batches in batch data."):
+    with pytest.raises(ValueError, match="Inconsistent number of batches in batch data"):
         get_dataset_batch_size(data_sparse)

tests/unit/validation/test_rules.py

Lines changed: 1 addition & 1 deletion
@@ -673,7 +673,7 @@ def _mock_nan_type(component: ComponentType, field: str):
     assert len(errors) == 1
     assert errors == [MultiFieldValidationError("foo_test", ["foo", "bar", "baz"], [1])]

-    with pytest.raises(ValueError, match="The fields parameter must contain at least 2 fields.") as excinfo:
+    with pytest.raises(ValueError, match="The fields parameter must contain at least 2 fields") as excinfo:
         not_all_missing(invalid, ["bar"], "foo_test")

     assert excinfo.type is ValueError

tests/unit/validation/test_validation_functions.py

Lines changed: 4 additions & 2 deletions
@@ -88,11 +88,13 @@ def test_assert_valid_data_structure():

     # Invalid component type
     input_with_wrong_component = {ComponentType.node: node_input, "some_random_component": line_input}
-    with pytest.raises(KeyError, match="Unknown component 'some_random_component' in DatasetType.input data."):
+    with pytest.raises(KeyError, match=r"Unknown component \'some_random_component\' in DatasetType\.input data"):
         assert_valid_data_structure(input_with_wrong_component, DatasetType.input)

     input_with_wrong_data_type = {ComponentType.node: node_input, ComponentType.line: [1, 2, 3]}
-    with pytest.raises(TypeError, match="Unexpected data type list for 'ComponentType.line' DatasetType.input data "):
+    with pytest.raises(
+        TypeError, match=r"Unexpected data type list for \'ComponentType\.line\' DatasetType\.input data"
+    ):
         assert_valid_data_structure(input_with_wrong_data_type, DatasetType.input)
