Skip to content

Commit 89a7a20

Browse files
committed
tighten test
1 parent 8d9f2e6 commit 89a7a20

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

tests/lora/test_lora_layers_flux.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -192,14 +192,14 @@ def test_lora_expansion_works_for_absent_keys(self):
192192

193193
self.assertTrue(os.path.isfile(os.path.join(tmpdirname, "pytorch_lora_weights.safetensors")))
194194
pipe.unload_lora_weights()
195-
pipe.load_lora_weights(os.path.join(tmpdirname, "pytorch_lora_weights.safetensors"))
195+
pipe.load_lora_weights(os.path.join(tmpdirname, "pytorch_lora_weights.safetensors"), adapter_name="one")
196196

197197
# Modify the state dict to exclude "x_embedder" related LoRA params.
198198
lora_state_dict = safetensors.torch.load_file(os.path.join(tmpdirname, "pytorch_lora_weights.safetensors"))
199199
lora_state_dict_without_xembedder = {k: v for k, v in lora_state_dict.items() if "x_embedder" not in k}
200200

201-
pipe.unload_lora_weights()
202-
pipe.load_lora_weights(lora_state_dict_without_xembedder)
201+
pipe.load_lora_weights(lora_state_dict_without_xembedder, adapter_name="two")
202+
pipe.set_adapters(["one", "two"])
203203
self.assertTrue(check_if_lora_correctly_set(pipe.transformer), "Lora not correctly set in transformer")
204204
images_lora_with_absent_keys = pipe(**inputs, generator=torch.manual_seed(0)).images
205205

0 commit comments

Comments (0)