Commit c541d74

tighten test

1 parent: d44f39c

File tree: 2 files changed (+4, -1)

  src/diffusers/loaders/lora_pipeline.py
  tests/lora/test_lora_layers_flux.py

src/diffusers/loaders/lora_pipeline.py

Lines changed: 0 additions & 1 deletion
@@ -2284,7 +2284,6 @@ def unload_lora_weights(self):
             transformer._transformer_norm_layers = None

         if getattr(transformer, "_overwritten_params", None) is not None:
-            print(f"{transformer._overwritten_params.keys()=}")
             overwritten_params = transformer._overwritten_params
             module_names = set()

tests/lora/test_lora_layers_flux.py

Lines changed: 4 additions & 0 deletions
@@ -479,6 +479,10 @@ def test_lora_unload_with_parameter_expanded_shapes(self):
         self.assertTrue(cap_logger.out.startswith("Expanding the nn.Linear input/output features for module"))

         control_pipe.unload_lora_weights()
+        self.assertTrue(
+            control_pipe.transformer.config.in_channels == num_channels_without_control,
+            f"Expected {num_channels_without_control} channels in the modified transformer but has {control_pipe.transformer.config.in_channels=}",
+        )
         loaded_pipe = FluxPipeline.from_pipe(control_pipe)
         self.assertTrue(
             loaded_pipe.transformer.config.in_channels == num_channels_without_control,
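
The added assertion tightens the test by checking that unload_lora_weights() restores the transformer's original in_channels on the still-live pipeline, not only on the pipeline later derived via from_pipe. A minimal sketch of the before/after pattern this pins down, assuming real FLUX.1 weights and a Canny Control LoRA are available (repo ids are illustrative; the actual unit test builds tiny dummy components instead of downloading models):

# Sketch only: repo ids are illustrative and the checkpoints are large;
# the test itself exercises the same pattern on small dummy models.
import torch
from diffusers import FluxControlPipeline, FluxPipeline

pipe = FluxControlPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
)
original_in_channels = pipe.transformer.config.in_channels

# Loading a Control LoRA expands the transformer's input projection,
# so config.in_channels grows (e.g. 64 -> 128 for FLUX.1).
pipe.load_lora_weights("black-forest-labs/FLUX.1-Canny-dev-lora")
assert pipe.transformer.config.in_channels > original_in_channels

# Unloading should shrink the config back to the original channel count;
# this is what the newly added assertion verifies.
pipe.unload_lora_weights()
assert pipe.transformer.config.in_channels == original_in_channels

# A pipeline created via from_pipe should also see the restored config
# (this part was already covered by the existing assertion).
loaded_pipe = FluxPipeline.from_pipe(pipe)
assert loaded_pipe.transformer.config.in_channels == original_in_channels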
