Commit b6316e8

Apply rule RUF043

1 parent e27a43a commit b6316e8

22 files changed, +59 −57 lines changed
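
RUF043 is Ruff's pytest-raises-ambiguous-pattern rule. The match argument of pytest.raises is compiled as a regular expression and applied with re.search, so metacharacters such as ".", "(", or "*" in a plain string may silently match more than the author intended. The rule asks for explicit intent: mark the pattern as a raw string when it is meant as a regex, or escape the metacharacters when they are meant literally. This commit applies both styles, as the hunks below show. A minimal sketch of the two fix styles follows; the divide helper and its error message are hypothetical, used only for illustration.

    import pytest


    def divide(a, b):
        # Hypothetical helper used only to demonstrate the two fix styles.
        if b == 0:
            raise ValueError("division by zero (b=0).")
        return a / b


    def test_divide_raises():
        # Style 1: raw string -- the pattern is explicitly a regex.
        with pytest.raises(ValueError, match=r"division by zero.*"):
            divide(1, 0)

        # Style 2: escaped metacharacters -- the pattern is explicitly
        # the literal message text.
        with pytest.raises(ValueError, match="division by zero \\(b=0\\)\\."):
            divide(1, 0)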

tests/compile/test_builders.py

Lines changed: 1 addition & 1 deletion
@@ -45,7 +45,7 @@ class TestOpFromGraph(unittest_tools.InferShapeTester):
     def test_valid_input(self):
         x, y, z = matrices("xyz")
 
-        with pytest.raises(ValueError, match="Expected at least.*"):
+        with pytest.raises(ValueError, match=r"Expected at least.*"):
             OpFromGraph([x], [x])()
 
         with pytest.raises(ValueError, match=r"Expected 1 input\(s\)"):

tests/graph/test_compute_test_value.py

Lines changed: 1 addition & 1 deletion
@@ -264,7 +264,7 @@ def fx(prior_result, A):
         fn=fx, outputs_info=pt.ones_like(A.T), non_sequences=A, n_steps=k
     )
 
-    with pytest.raises(ValueError, match="^could not broadcast input"):
+    with pytest.raises(ValueError, match=r"^could not broadcast input"):
         pytensor.scan(
             fn=fx, outputs_info=pt.ones_like(A.T), non_sequences=A, n_steps=k
         )

tests/graph/test_fg.py

Lines changed: 12 additions & 12 deletions
@@ -62,8 +62,8 @@ def test_validate_inputs(self):
         with pytest.raises(
             ValueError,
             match=(
-                "One of the provided inputs is the output of an already existing node. "
-                "If that is okay, either discard that input's owner or use graph.clone."
+                "One of the provided inputs is the output of an already existing node\\. "
+                "If that is okay, either discard that input's owner or use graph\\.clone\\."
             ),
         ):
             var3 = op1(var1)
@@ -208,7 +208,7 @@ def test_import_var(self):
         assert var5 in fg.variables
         assert var5.owner in fg.apply_nodes
 
-        with pytest.raises(TypeError, match="Computation graph contains.*"):
+        with pytest.raises(TypeError, match=r"Computation graph contains.*"):
             from pytensor.graph.null_type import NullType
 
             fg.import_var(NullType()(), "testing")
@@ -265,7 +265,7 @@ def test_replace_test_value(self):
 
         assert var6.tag.test_value.shape != var4.tag.test_value.shape
 
-        with pytest.raises(AssertionError, match="The replacement.*"):
+        with pytest.raises(AssertionError, match=r"The replacement.*"):
             fg.replace(var4, var6)
 
     def test_replace(self):
@@ -342,27 +342,27 @@ def test_check_integrity(self):
         var5 = op3(var4, var2, var2)
         fg = FunctionGraph([var1, var2], [var3, var5], clone=False)
 
-        with pytest.raises(Exception, match="The following nodes are .*"):
+        with pytest.raises(Exception, match=r"The following nodes are .*"):
             fg.apply_nodes.remove(var5.owner)
 
             fg.check_integrity()
 
-        with pytest.raises(Exception, match="Inconsistent clients.*"):
+        with pytest.raises(Exception, match=r"Inconsistent clients.*"):
             fg.apply_nodes.add(var5.owner)
             fg.remove_client(var2, (var5.owner, 1))
 
             fg.check_integrity()
 
         fg.add_client(var2, (var5.owner, 1))
 
-        with pytest.raises(Exception, match="The following variables are.*"):
+        with pytest.raises(Exception, match=r"The following variables are.*"):
             fg.variables.remove(var4)
 
             fg.check_integrity()
 
         fg.variables.add(var4)
 
-        with pytest.raises(Exception, match="Undeclared input.*"):
+        with pytest.raises(Exception, match=r"Undeclared input.*"):
             var6 = MyVariable2("var6")
             fg.clients[var6] = [(var5.owner, 3)]
             fg.variables.add(var6)
@@ -376,26 +376,26 @@ def test_check_integrity(self):
         # TODO: What if the index value is greater than 1? It will throw an
         # `IndexError`, but that doesn't sound like anything we'd want.
         out_node = Output(idx=1).make_node(var4)
-        with pytest.raises(Exception, match="Inconsistent clients list.*"):
+        with pytest.raises(Exception, match=r"Inconsistent clients list.*"):
            fg.add_client(var4, (out_node, 0))
 
            fg.check_integrity()
 
        fg.remove_client(var4, (out_node, 0))
 
-        with pytest.raises(TypeError, match="The first entry of.*"):
+        with pytest.raises(TypeError, match=r"The first entry of.*"):
            fg.add_client(var4, (None, 0))
 
        var7 = op1(var4)
 
-        with pytest.raises(Exception, match="Client not in FunctionGraph.*"):
+        with pytest.raises(Exception, match=r"Client not in FunctionGraph.*"):
            fg.add_client(var4, (var7.owner, 0))
 
            fg.check_integrity()
 
        fg.remove_client(var4, (var7.owner, 0))
 
-        with pytest.raises(Exception, match="Inconsistent clients list.*"):
+        with pytest.raises(Exception, match=r"Inconsistent clients list.*"):
            fg.add_client(var4, (var3.owner, 0))
 
            fg.check_integrity()
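
A note on the escaped-literal style used in test_fg.py above (an illustration, not part of the commit): in a regular Python string, "\\." is a backslash followed by a dot, which the regex engine reads as a literal dot; it is the same two-character pattern that r"\." spells in a raw string.

    import re

    # The two spellings produce the identical two-character pattern.
    assert "\\." == r"\."

    # Escaped: matches only a literal dot between the words.
    assert re.search("graph\\.clone", "use graph.clone") is not None
    assert re.search("graph\\.clone", "use graphxclone") is None

    # Unescaped: "." matches any character, so this matches too.
    assert re.search("graph.clone", "use graphxclone") is not None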

tests/link/numba/test_random.py

Lines changed: 1 addition & 1 deletion
@@ -662,7 +662,7 @@ def test_unaligned_RandomVariable(rv_op, dist_args, base_size, cdf_name, params_
             (10, 4),
             pytest.raises(
                 ValueError,
-                match="Vectorized input 0 has an incompatible shape in axis 1.",
+                match="Vectorized input 0 has an incompatible shape in axis 1\\.",
             ),
         ),
     ],

tests/link/pytorch/test_elemwise.py

Lines changed: 2 additions & 2 deletions
@@ -108,7 +108,7 @@ def test_softmax(axis, dtype):
     if dtype == "int64":
         with pytest.raises(
             NotImplementedError,
-            match="Pytorch Softmax is not currently implemented for non-float types.",
+            match="Pytorch Softmax is not currently implemented for non-float types\\.",
         ):
             compare_pytorch_and_py([x], [out], [test_input])
     else:
@@ -125,7 +125,7 @@ def test_logsoftmax(axis, dtype):
     if dtype == "int64":
         with pytest.raises(
             NotImplementedError,
-            match="Pytorch LogSoftmax is not currently implemented for non-float types.",
+            match="Pytorch LogSoftmax is not currently implemented for non-float types\\.",
         ):
             compare_pytorch_and_py([x], [out], [test_input])
     else:

tests/link/pytorch/test_extra_ops.py

Lines changed: 2 additions & 2 deletions
@@ -24,9 +24,9 @@ def test_pytorch_CumOp(axis, dtype):
 
     # Create the output variable
     if isinstance(axis, tuple):
-        with pytest.raises(TypeError, match="axis must be an integer or None."):
+        with pytest.raises(TypeError, match="axis must be an integer or None\\."):
             out = pt.cumsum(a, axis=axis)
-        with pytest.raises(TypeError, match="axis must be an integer or None."):
+        with pytest.raises(TypeError, match="axis must be an integer or None\\."):
             out = pt.cumprod(a, axis=axis)
     else:
         out = pt.cumsum(a, axis=axis)

tests/link/test_vm.py

Lines changed: 3 additions & 3 deletions
@@ -421,7 +421,7 @@ def make_thunk(self, *args, **kwargs):
 
     z = BadOp()(a)
 
-    with pytest.raises(Exception, match=".*Apply node that caused the error.*"):
+    with pytest.raises(Exception, match=r".*Apply node that caused the error.*"):
         function([a], z, mode=Mode(optimizer=None, linker=linker))
 
 
@@ -433,7 +433,7 @@ def __call__(self):
     a = scalar()
     fg = FunctionGraph(outputs=[SomeOp()(a)])
 
-    with pytest.raises(ValueError, match="`nodes` and `thunks`.*"):
+    with pytest.raises(ValueError, match=r"`nodes` and `thunks`.*"):
         SomeVM(fg, fg.apply_nodes, [], [])
 
 
@@ -453,7 +453,7 @@ def test_Loop_exception():
 
     thunks = [node.op.make_thunk(node, storage_map, compute_map, []) for node in nodes]
 
-    with pytest.raises(ValueError, match="`nodes`, `thunks` and `post_thunk_clear`.*"):
+    with pytest.raises(ValueError, match=r"`nodes`, `thunks` and `post_thunk_clear`.*"):
         Loop(
             fg,
             fg.apply_nodes,
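
Since pytest applies match patterns with re.search, which scans anywhere in the exception message, the leading and trailing ".*" in the first pattern above are redundant; the raw-string fix deliberately preserves the pattern as written rather than rewriting it. A small demonstration (the message text below is made up):

    import re

    msg = "Traceback ...\nApply node that caused the error: BadOp(a)"
    assert re.search(r".*Apply node that caused the error.*", msg) is not None
    assert re.search(r"Apply node that caused the error", msg) is not None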

tests/sparse/test_type.py

Lines changed: 1 addition & 1 deletion
@@ -79,5 +79,5 @@ def test_SparseTensorType_filter():
     x_res = z.type.filter(x.astype("float32"), allow_downcast=True)
     assert x_res.dtype == "float64"
 
-    with pytest.raises(TypeError, match=".*dtype but got.*"):
+    with pytest.raises(TypeError, match=r".*dtype but got.*"):
         w.type.filter(x)

tests/tensor/random/test_op.py

Lines changed: 2 additions & 2 deletions
@@ -32,7 +32,7 @@ def test_RandomVariable_basics(strict_test_value_flags):
     assert str_res == 'normal_rv{"(),()->()"}'
 
     # `ndims_params` should be a `Sequence` type
-    with pytest.raises(TypeError, match="^Parameter ndims_params*"):
+    with pytest.raises(TypeError, match=r"^Parameter ndims_params*"):
         RandomVariable(
             "normal",
             0,
@@ -42,7 +42,7 @@ def test_RandomVariable_basics(strict_test_value_flags):
         )
 
     # `size` should be a `Sequence` type
-    with pytest.raises(TypeError, match="^Parameter size*"):
+    with pytest.raises(TypeError, match=r"^Parameter size*"):
         RandomVariable(
             "normal",
             0,
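
One caveat worth noting (an observation, not part of the commit): in "^Parameter size*" the "*" quantifies only the preceding "e", so the pattern matches "Parameter siz", "Parameter size", "Parameter sizee", and so on; "^Parameter size.*" was probably intended. The RUF043 fix only adds the r prefix, so it preserves the existing behavior either way. The example messages below are hypothetical:

    import re

    # The trailing "*" applies to the final character, not to "anything".
    assert re.search(r"^Parameter size*", "Parameter siz must be a sequence")
    assert re.search(r"^Parameter size*", "Parameter sizeee")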

tests/tensor/random/test_utils.py

Lines changed: 2 additions & 2 deletions
@@ -284,7 +284,7 @@ def __init__(self, seed=123):
 
 
 def test_supp_shape_from_ref_param_shape():
-    with pytest.raises(ValueError, match="^ndim_supp*"):
+    with pytest.raises(ValueError, match=r"^ndim_supp*"):
         supp_shape_from_ref_param_shape(
             ndim_supp=0,
             dist_params=(np.array([1, 2]), 0),
@@ -306,7 +306,7 @@ def test_supp_shape_from_ref_param_shape():
     )
     assert res == (2,)
 
-    with pytest.raises(ValueError, match="^Reference parameter*"):
+    with pytest.raises(ValueError, match=r"^Reference parameter*"):
         supp_shape_from_ref_param_shape(
             ndim_supp=1,
             dist_params=(np.array(1),),
