@@ -1200,15 +1200,16 @@ def test_forward_scriptability(self):
12001200 # Non-regression test for https://github.com/pytorch/vision/issues/4078
12011201 torch .jit .script (ops .DeformConv2d (in_channels = 8 , out_channels = 8 , kernel_size = 3 ))
12021202
1203+
# NS: Remove me once backward is implemented
def xfail_if_mps(x):
    """Rewrite test *x*'s marks so the ``mps`` device is an expected failure.

    Scans ``x.pytestmark`` and replaces any ``parametrize("device", ...)``
    mark with one whose parameter list is ``cpu_and_cuda()`` plus an ``mps``
    param carrying ``needs_mps`` and ``xfail`` marks (MPS backward is not
    implemented yet, hence the expected failure). Every other mark is kept
    unchanged.

    NOTE(review): used as a decorator in ``additional_decorators`` — the
    (unshown) tail of this function is expected to ``return x``; confirm.
    """
    mps_xfail_param = pytest.param("mps", marks=(pytest.mark.needs_mps, pytest.mark.xfail))
    new_pytestmark = []
    for mark in x.pytestmark:
        # Only parametrize marks over "device" are rewritten; anything else
        # (including parametrize over other argnames) passes through untouched.
        if isinstance(mark, pytest.Mark) and mark.name == "parametrize":
            if mark.args[0] == "device":
                params = cpu_and_cuda() + (mps_xfail_param,)
                new_pytestmark.append(pytest.mark.parametrize("device", params))
                continue
        new_pytestmark.append(mark)
    # Reassign via __dict__ to bypass any descriptor/attribute machinery on x.
    x.__dict__["pytestmark"] = new_pytestmark
@@ -1220,8 +1221,10 @@ def xfail_if_mps(x):
12201221 namespaces = ["torchvision" ],
12211222 failures_dict_path = os .path .join (os .path .dirname (__file__ ), "optests_failures_dict.json" ),
12221223 # Skip tests due to unimplemented backward
1223- additional_decorators = {"test_aot_dispatch_dynamic__test_forward" : [xfail_if_mps ],
1224- "test_autograd_registration__test_forward" : [xfail_if_mps ]},
1224+ additional_decorators = {
1225+ "test_aot_dispatch_dynamic__test_forward" : [xfail_if_mps ],
1226+ "test_autograd_registration__test_forward" : [xfail_if_mps ],
1227+ },
12251228 test_utils = OPTESTS ,
12261229)
12271230
0 commit comments