We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent efa33e3 commit 83701de — Copy full SHA for 83701de
tests/lora/utils.py
@@ -385,7 +385,7 @@ def test_low_cpu_mem_usage_with_loading(self):
385
386
self.assertTrue(os.path.isfile(os.path.join(tmpdirname, "pytorch_lora_weights.bin")))
387
pipe.unload_lora_weights()
388
- pipe.load_lora_weights(os.path.join(tmpdirname, "pytorch_lora_weights.bin"))
+ pipe.load_lora_weights(os.path.join(tmpdirname, "pytorch_lora_weights.bin"), low_cpu_mem_usage=False)
389
390
for module_name, module in modules_to_save.items():
391
self.assertTrue(check_if_lora_correctly_set(module), f"Lora not correctly set in {module_name}")
0 commit comments