@@ -2178,6 +2178,11 @@ def test_ddpm_ddim_equality_batched(self):
         assert np.abs(ddpm_images - ddim_images).max() < 1e-1


+@slow
+@require_torch_2
+@require_torch_accelerator
+@require_peft_backend
+@is_torch_compile
 class TestLoraHotSwappingForPipeline(unittest.TestCase):
     """Test that hotswapping does not result in recompilation in a pipeline.

@@ -2297,42 +2302,26 @@ def check_pipeline_hotswap(self, do_compile, rank0, rank1, target_modules):
             assert np.allclose(output1_before, output1_after, atol=tol, rtol=tol)

     @parameterized.expand([(11, 11), (7, 13), (13, 7)])  # important to test small to large and vice versa
-    @slow
-    @require_torch_2
-    @require_torch_accelerator
-    @require_peft_backend
     def test_hotswapping_pipeline(self, rank0, rank1):
         self.check_pipeline_hotswap(
             do_compile=False, rank0=rank0, rank1=rank1, target_modules=["to_q", "to_k", "to_v", "to_out.0"]
         )

     @parameterized.expand([(11, 11), (7, 13), (13, 7)])  # important to test small to large and vice versa
-    @slow
-    @require_torch_2
-    @require_torch_accelerator
-    @require_peft_backend
     def test_hotswapping_compiled_pipline_linear(self, rank0, rank1):
         # It's important to add this context to raise an error on recompilation
         target_modules = ["to_q", "to_k", "to_v", "to_out.0"]
         with torch._dynamo.config.patch(error_on_recompile=True):
             self.check_pipeline_hotswap(do_compile=True, rank0=rank0, rank1=rank1, target_modules=target_modules)

     @parameterized.expand([(11, 11), (7, 13), (13, 7)])  # important to test small to large and vice versa
-    @slow
-    @require_torch_2
-    @require_torch_accelerator
-    @require_peft_backend
     def test_hotswapping_compiled_pipline_conv2d(self, rank0, rank1):
         # It's important to add this context to raise an error on recompilation
         target_modules = ["conv", "conv1", "conv2"]
         with torch._dynamo.config.patch(error_on_recompile=True):
             self.check_pipeline_hotswap(do_compile=True, rank0=rank0, rank1=rank1, target_modules=target_modules)

     @parameterized.expand([(11, 11), (7, 13), (13, 7)])  # important to test small to large and vice versa
-    @slow
-    @require_torch_2
-    @require_torch_accelerator
-    @require_peft_backend
     def test_hotswapping_compiled_pipline_both_linear_and_conv2d(self, rank0, rank1):
         # It's important to add this context to raise an error on recompilation
         target_modules = ["to_q", "conv"]
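
For context on why hoisting the decorators works: `unittest` skip decorators (which the diffusers `require_*` helpers are built on) can be applied to a `TestCase` subclass, in which case the skip condition covers every test method in the class. A minimal sketch, using a hypothetical `require_feature` decorator standing in for `require_torch_2` and friends:

```python
import unittest

# Hypothetical stand-in for decorators like `require_torch_2`; skip decorators
# built from unittest.skipUnless work on both functions and TestCase classes.
require_feature = unittest.skipUnless(False, "feature not available")  # always skips in this sketch


@require_feature
class ExampleTests(unittest.TestCase):
    def test_a(self):  # skipped via the class-level decorator
        self.assertTrue(True)

    def test_b(self):  # also skipped; no per-method decorator needed
        self.assertTrue(True)


if __name__ == "__main__":
    unittest.main()
```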
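The `error_on_recompile=True` context used in the compiled tests is what turns "hotswapping triggered a recompile" into a hard failure rather than a silent slowdown. A minimal sketch of the mechanism, independent of the pipeline tests above:

```python
import torch


def fn(x):
    return x * 2


compiled = torch.compile(fn)
compiled(torch.randn(4))  # first call: Dynamo traces and compiles a graph

# Inside this context, any event that would trigger a fresh compilation
# (e.g. a guard failure from a new input shape) raises instead of silently
# recompiling -- so a test body that runs cleanly proves no recompile happened.
with torch._dynamo.config.patch(error_on_recompile=True):
    compiled(torch.randn(4))    # same shape: reuses the cached graph, no error
    # compiled(torch.randn(8))  # a different shape would raise here
```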