File tree Expand file tree Collapse file tree 2 files changed +4
-1
lines changed Expand file tree Collapse file tree 2 files changed +4
-1
lines changed Original file line number Diff line number Diff line change @@ -123,7 +123,7 @@ def check_hotswap(do_hotswap):
123123        unet  =  get_small_unet ()
124124        file_name  =  os .path .join (tmp_dirname , "pytorch_lora_weights.safetensors" )
125125        unet .load_attn_procs (file_name )
126-         #  unet = torch.compile(unet, mode="reduce-overhead")
126+         unet  =  torch .compile (unet , mode = "reduce-overhead" )
127127
128128        torch .manual_seed (42 )
129129        out0  =  unet (** dummy_input )["sample" ]
Original file line number Diff line number Diff line change @@ -2070,6 +2070,9 @@ class TestLoraHotSwapping:
20702070    tested there. The goal of this test is specifically to ensure that hotswapping with diffusers does not require 
20712071    recompilation. 
20722072
2073+     The reason why we need to shell out instead of just running the script inside of the test is that shelling out is 
2074+     required to collect the torch.compile logs. 
2075+ 
20732076    """ 
20742077
20752078    @slow  
 
 
   
 
     
   
   
          
    
    
     
    
      
     
     
    You can’t perform that action at this time.
  
 
    
  
    
      
        
     
       
      
     
   
 
    
    
  
 
  
 
     
    
0 commit comments