Commit c8d4a1c
hlky and sayakpaul authored
Update tests/lora/test_lora_layers_flux.py
Co-authored-by: Sayak Paul <[email protected]>
1 parent a2cdcda commit c8d4a1c

1 file changed: +2 -2 lines changed

tests/lora/test_lora_layers_flux.py

Lines changed: 2 additions & 2 deletions
@@ -195,10 +195,10 @@ def test_lora_expansion_works_for_absent_keys(self):
             # Modify the state dict to exclude "x_embedder" related LoRA params.
             lora_state_dict = safetensors.torch.load_file(os.path.join(tmpdirname, "pytorch_lora_weights.safetensors"))
             lora_state_dict_without_xembedder = {k: v for k, v in lora_state_dict.items() if "x_embedder" not in k}
-            pipe.load_lora_weights(lora_state_dict_without_xembedder, adapter_name="two")
+            pipe.load_lora_weights(lora_state_dict_without_xembedder, adapter_name="one")
 
             # Load state dict with `x_embedder`.
-            pipe.load_lora_weights(os.path.join(tmpdirname, "pytorch_lora_weights.safetensors"), adapter_name="one")
+            pipe.load_lora_weights(os.path.join(tmpdirname, "pytorch_lora_weights.safetensors"), adapter_name="two")
 
             pipe.set_adapters(["one", "two"])
             self.assertTrue(check_if_lora_correctly_set(pipe.transformer), "Lora not correctly set in transformer")
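
For readability, the post-fix flow as a standalone sketch (not part of the commit): it assumes `pipe` is an already-instantiated Flux pipeline with LoRA support and `tmpdirname` is a directory holding a previously saved LoRA checkpoint, mirroring what the test sets up before this hunk.

import os

import safetensors.torch

# Assumed context (hypothetical here): `pipe` is a Flux pipeline with LoRA support,
# and `tmpdirname` contains a LoRA checkpoint saved earlier in the test.
weights_path = os.path.join(tmpdirname, "pytorch_lora_weights.safetensors")

# Drop every "x_embedder" entry so one adapter is missing those keys.
lora_state_dict = safetensors.torch.load_file(weights_path)
lora_state_dict_without_xembedder = {k: v for k, v in lora_state_dict.items() if "x_embedder" not in k}

# Adapter "one" lacks the x_embedder keys; adapter "two" has the full set.
pipe.load_lora_weights(lora_state_dict_without_xembedder, adapter_name="one")
pipe.load_lora_weights(weights_path, adapter_name="two")

# Activate both adapters; the expansion logic must fill in the keys absent from "one".
pipe.set_adapters(["one", "two"])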
