We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent c8afd1c · commit 6c0c72d — Copy full SHA for 6c0c72d
tests/lora/utils.py
@@ -2309,7 +2309,7 @@ def test_lora_adapter_metadata_save_load_inference(self, lora_alpha):
2309
pipe = self.pipeline_class(**components).to(torch_device)
2310
_, _, inputs = self.get_dummy_inputs(with_generator=False)
2311
2312
- output_no_lora = self.cached_non_lora_outputs(scheduler_cls.__name__)
+ output_no_lora = self.cached_non_lora_outputs[scheduler_cls.__name__]
2313
self.assertTrue(output_no_lora.shape == self.output_shape)
2314
2315
pipe, _ = self.add_adapters_to_pipeline(
0 commit comments