We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 5f8cde6 commit 05fc3d3Copy full SHA for 05fc3d3
tests/lora/utils.py
@@ -1766,8 +1766,7 @@ def test_simple_inference_with_text_lora_denoiser_fused_multi(
1766
pipe.set_adapters(["adapter-1"])
1767
outputs_lora_1 = pipe(**inputs, generator=torch.manual_seed(0))[0]
1768
1769
- pipe.fuse_lora(components=self.pipeline_class._lora_loadable_modules)
1770
- # pipe.fuse_lora(components=self.pipeline_class._lora_loadable_modules, adapter_names=["adapter-1"])
+ pipe.fuse_lora(components=self.pipeline_class._lora_loadable_modules, adapter_names=["adapter-1"])
1771
assert pipe.num_fused_loras == 1
1772
1773
# Fusing should still keep the LoRA layers so output should remain the same
0 commit comments