Skip to content

Commit 36fea4e

Browse files
committed
fix
1 parent c5a753a commit 36fea4e

File tree

1 file changed: 4 additions (+) and 5 deletions (−)

src/diffusers/loaders/lora_pipeline.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4831,10 +4831,9 @@ def _maybe_expand_t2v_lora_for_i2v(
 
                 if ref_key_lora_A not in state_dict or ref_key_lora_B not in state_dict:
                     continue
-                lora_dtype = ref_lora_A_weight.dtype
 
-                state_dict[f"transformer.blocks.{i}.attn2.{c}.lora_A.weight"] = torch.zeros(
-                    ref_lora_A_weight.shape, device=target_device, dtype=lora_dtype
+                state_dict[f"transformer.blocks.{i}.attn2.{c}.lora_A.weight"] = torch.zeros_like(
+                    ref_lora_A_weight.shape, device=target_device
                 )
                 state_dict[f"transformer.blocks.{i}.attn2.{c}.lora_B.weight"] = torch.zeros_like(
                     state_dict[f"transformer.blocks.{i}.attn2.to_k.lora_B.weight"], device=target_device
@@ -4846,8 +4845,8 @@ def _maybe_expand_t2v_lora_for_i2v(
                 ref_key_lora_B_bias = f"transformer.blocks.{i}.attn2.to_k.lora_B.bias"
                 if has_bias and ref_key_lora_B_bias in state_dict:
                     ref_lora_B_bias_tensor = state_dict[ref_key_lora_B_bias]
-                    state_dict[f"transformer.blocks.{i}.attn2.{diffusers_name_part}.lora_B.bias"] = torch.zeros(
-                        ref_lora_B_bias_tensor.shape, device=target_device, dtype=lora_dtype
+                    state_dict[f"transformer.blocks.{i}.attn2.{diffusers_name_part}.lora_B.bias"] = torch.zeros_like(
+                        ref_lora_B_bias_tensor.shape, device=target_device,
                     )
 
         return state_dict

0 commit comments

Comments (0)