1 file changed, 3 insertions(+), 3 deletions(-)

@@ -192,14 +192,14 @@ def test_lora_expansion_works_for_absent_keys(self):
 
         self.assertTrue(os.path.isfile(os.path.join(tmpdirname, "pytorch_lora_weights.safetensors")))
         pipe.unload_lora_weights()
-        pipe.load_lora_weights(os.path.join(tmpdirname, "pytorch_lora_weights.safetensors"))
+        pipe.load_lora_weights(os.path.join(tmpdirname, "pytorch_lora_weights.safetensors"), adapter_name="one")
 
         # Modify the state dict to exclude "x_embedder" related LoRA params.
         lora_state_dict = safetensors.torch.load_file(os.path.join(tmpdirname, "pytorch_lora_weights.safetensors"))
         lora_state_dict_without_xembedder = {k: v for k, v in lora_state_dict.items() if "x_embedder" not in k}
 
-        pipe.unload_lora_weights()
-        pipe.load_lora_weights(lora_state_dict_without_xembedder)
+        pipe.load_lora_weights(lora_state_dict_without_xembedder, adapter_name="two")
+        pipe.set_adapters(["one", "two"])
         self.assertTrue(check_if_lora_correctly_set(pipe.transformer), "Lora not correctly set in transformer")
         images_lora_with_absent_keys = pipe(**inputs, generator=torch.manual_seed(0)).images
 
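For context, a minimal standalone sketch of the flow the updated test now exercises: the same LoRA checkpoint is loaded twice, once intact as adapter "one" and once with the x_embedder entries stripped as adapter "two", and both adapters are activated together via set_adapters. The pipeline class, checkpoint id, and file path below are illustrative assumptions, not taken from the diff.

# Sketch only: pipeline, checkpoint id, and LoRA file path are placeholders.
import torch
import safetensors.torch
from diffusers import FluxPipeline  # assumed pipeline class for illustration

pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16)

# First adapter: the full LoRA checkpoint.
pipe.load_lora_weights("pytorch_lora_weights.safetensors", adapter_name="one")

# Second adapter: the same weights with the "x_embedder" keys removed.
state_dict = safetensors.torch.load_file("pytorch_lora_weights.safetensors")
state_dict_without_xembedder = {k: v for k, v in state_dict.items() if "x_embedder" not in k}
pipe.load_lora_weights(state_dict_without_xembedder, adapter_name="two")

# Activate both adapters so inference runs with the absent-key adapter alongside the full one.
pipe.set_adapters(["one", "two"])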