
Commit f5b5986

Commit message: update
Parent: 51c570d

1 file changed (+6, −4 lines)


src/diffusers/loaders/lora_pipeline.py

Lines changed: 6 additions & 4 deletions
@@ -4255,13 +4255,15 @@ def _maybe_expand_t2v_lora_for_i2v(
         transformer: torch.nn.Module,
         state_dict,
     ):
-        if any(k.startswith("blocks.") for k in state_dict):
+
+        if transformer.config.image_dim is not None:
+            return state_dict
+
+        if any(k.startswith("transformer.blocks.") for k in state_dict):
             num_blocks = len({k.split("blocks.")[1].split(".")[0] for k in state_dict})
             is_i2v_lora = any("add_k_proj" in k for k in state_dict) and any("add_v_proj" in k for k in state_dict)
-            if is_i2v_lora:
-                return state_dict

-            if transformer.config.image_dim is None:
+            if is_i2v_lora:
                 return state_dict

             for i in range(num_blocks):
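
Illustrative note (not part of the commit): the change moves the transformer.config.image_dim check ahead of the key scan and tightens the key prefix from "blocks." to "transformer.blocks.". Below is a minimal sketch of how those checks behave on a toy LoRA state dict; the SimpleNamespace stand-in for the transformer config and the key names are assumptions for demonstration only, not code from the repository.

from types import SimpleNamespace

# Stand-in for transformer.config; image_dim=None mimics a T2V transformer (assumption).
transformer = SimpleNamespace(config=SimpleNamespace(image_dim=None))

# Toy T2V LoRA state dict; key names are illustrative only.
state_dict = {
    "transformer.blocks.0.attn2.to_k.lora_A.weight": None,
    "transformer.blocks.1.attn2.to_k.lora_A.weight": None,
}

# First check added by the commit: when image_dim is set, the state dict is returned untouched.
returns_early = transformer.config.image_dim is not None

# Updated key prefix: "transformer.blocks." instead of "blocks.".
has_block_keys = any(k.startswith("transformer.blocks.") for k in state_dict)

# Unchanged helper logic from the hunk's context lines.
num_blocks = len({k.split("blocks.")[1].split(".")[0] for k in state_dict})
is_i2v_lora = any("add_k_proj" in k for k in state_dict) and any("add_v_proj" in k for k in state_dict)

print(returns_early, has_block_keys, num_blocks, is_i2v_lora)
# False True 2 False -> this toy T2V LoRA would fall through to the expansion loop

With image_dim set to a real value, the new first check short-circuits and the expansion loop is never reached.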
