Skip to content

Commit bc157e6

Browse files
Change order of test decorators
parameterized.expand seems to ignore skip decorators if it is applied last (i.e., as the innermost decorator).
1 parent 5ab1460 commit bc157e6

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

tests/pipelines/test_pipelines.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2319,19 +2319,19 @@ def check_hotswap(self, do_compile, rank0, rank1):
23192319
with self.assertRaisesRegex(ValueError, msg):
23202320
unet.load_lora_adapter(file_name1, adapter_name=name, hotswap=True)
23212321

2322+
@parameterized.expand([(11, 11), (7, 13), (13, 7)]) # important to test small to large and vice versa
23222323
@slow
23232324
@require_torch_2
23242325
@require_torch_accelerator
23252326
@require_peft_backend
2326-
@parameterized.expand([(11, 11), (7, 13), (13, 7)]) # important to test small to large and vice versa
23272327
def test_hotswapping_diffusers_model(self, rank0, rank1):
23282328
self.check_hotswap(do_compile=False, rank0=rank0, rank1=rank1)
23292329

2330+
@parameterized.expand([(11, 11), (7, 13), (13, 7)]) # important to test small to large and vice versa
23302331
@slow
23312332
@require_torch_2
23322333
@require_torch_accelerator
23332334
@require_peft_backend
2334-
@parameterized.expand([(11, 11), (7, 13), (13, 7)]) # important to test small to large and vice versa
23352335
def test_hotswapping_compiled_diffusers_model(self, rank0, rank1):
23362336
# It's important to add this context to raise an error on recompilation
23372337
with torch._dynamo.config.patch(error_on_recompile=True):
@@ -2422,11 +2422,11 @@ def check_pipeline_hotswap(self, rank0, rank1):
24222422
# sanity check: since it's the same LoRA, the results should be identical
24232423
assert np.allclose(output1_before, output1_after, atol=tol, rtol=tol)
24242424

2425+
@parameterized.expand([(11, 11), (7, 13), (13, 7)]) # important to test small to large and vice versa
24252426
@slow
24262427
@require_torch_2
24272428
@require_torch_accelerator
24282429
@require_peft_backend
2429-
@parameterized.expand([(11, 11), (7, 13), (13, 7)]) # important to test small to large and vice versa
24302430
def test_hotswapping_compiled_diffusers_pipline(self, rank0, rank1):
24312431
# It's important to add this context to raise an error on recompilation
24322432
with torch._dynamo.config.patch(error_on_recompile=True):

0 commit comments

Comments
 (0)