1 file changed: +2 -2 lines changed
@@ -195,10 +195,10 @@ def test_lora_expansion_works_for_absent_keys(self):
         # Modify the state dict to exclude "x_embedder" related LoRA params.
         lora_state_dict = safetensors.torch.load_file(os.path.join(tmpdirname, "pytorch_lora_weights.safetensors"))
         lora_state_dict_without_xembedder = {k: v for k, v in lora_state_dict.items() if "x_embedder" not in k}
-        pipe.load_lora_weights(lora_state_dict_without_xembedder, adapter_name="two")
+        pipe.load_lora_weights(lora_state_dict_without_xembedder, adapter_name="one")

         # Load state dict with `x_embedder`.
-        pipe.load_lora_weights(os.path.join(tmpdirname, "pytorch_lora_weights.safetensors"), adapter_name="one")
+        pipe.load_lora_weights(os.path.join(tmpdirname, "pytorch_lora_weights.safetensors"), adapter_name="two")

         pipe.set_adapters(["one", "two"])
         self.assertTrue(check_if_lora_correctly_set(pipe.transformer), "Lora not correctly set in transformer")
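For reference, the adapter-loading pattern this test exercises looks roughly as follows outside the test harness. This is a minimal sketch, not code from the repository: the FLUX.1-dev checkpoint id and the lora_dir path are illustrative stand-ins, and running it requires the actual model weights.

import os

import safetensors.torch
import torch
from diffusers import FluxPipeline

# Illustrative checkpoint and LoRA file locations; substitute your own.
pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16)
lora_path = os.path.join("lora_dir", "pytorch_lora_weights.safetensors")

# Load the LoRA once with its "x_embedder" entries stripped out, as adapter "one"...
state_dict = safetensors.torch.load_file(lora_path)
state_dict_without_xembedder = {k: v for k, v in state_dict.items() if "x_embedder" not in k}
pipe.load_lora_weights(state_dict_without_xembedder, adapter_name="one")

# ...and once in full, so adapter "two" carries the x_embedder keys that "one" lacks.
pipe.load_lora_weights(lora_path, adapter_name="two")

# Activating both adapters should still work despite the keys absent from "one".
pipe.set_adapters(["one", "two"])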