Skip to content

Commit 3834c16

Browse files
author
Linoy
committed
Revert "copies fix"
This reverts commit 051f534.
1 parent 051f534 commit 3834c16

File tree

1 file changed

+5
-1
lines changed

1 file changed

+5
-1
lines changed

src/diffusers/loaders/lora_pipeline.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4313,7 +4313,11 @@ def load_lora_weights(

         # First, ensure that the checkpoint is a compatible one and can be successfully loaded.
         state_dict = self.lora_state_dict(pretrained_model_name_or_path_or_dict, **kwargs)
-
+        # convert T2V LoRA to I2V LoRA (when loaded to Wan I2V) by adding zeros for the additional (missing) _img layers
+        state_dict = self._maybe_expand_t2v_lora_for_i2v(
+            transformer=getattr(self, self.transformer_name) if not hasattr(self, "transformer") else self.transformer,
+            state_dict=state_dict,
+        )
         is_correct_format = all("lora" in key for key in state_dict.keys())
         if not is_correct_format:
             raise ValueError("Invalid LoRA checkpoint.")

0 commit comments

Comments (0)