Commit b7b1abd

committed
vace
1 parent 7530559 commit b7b1abd

File tree

1 file changed: +5 -1 lines changed

src/diffusers/loaders/lora_pipeline.py

Lines changed: 5 additions & 1 deletion
@@ -4861,24 +4861,27 @@ def _maybe_expand_t2v_lora_for_vace(
 ):
     target_device = transformer.device
     if hasattr(transformer, 'vace_blocks'):
+        print("HERE 1")
         inferred_rank_for_vace = None
         lora_weights_dtype_for_vace = next(iter(transformer.parameters())).dtype  # Fallback dtype

         for k_lora_any, v_lora_tensor_any in state_dict.items():
             if k_lora_any.endswith(".lora_A.weight"):
+                print("HERE 2")
                 inferred_rank_for_vace = v_lora_tensor_any.shape[0]
                 lora_weights_dtype_for_vace = v_lora_tensor_any.dtype
                 break  # Found one, good enough for rank and dtype

         if inferred_rank_for_vace is not None:
+            print("HERE 3")
             # Determine if the LoRA format (as potentially modified by I2V expansion) includes bias
             # This re-checks 'has_bias' based on the *current* state_dict.
             current_lora_has_bias = any(".lora_B.bias" in k for k in state_dict.keys())

             for i, vace_block_module_in_model in enumerate(transformer.vace_blocks):
                 # Specifically target proj_out as per the error message
                 if hasattr(vace_block_module_in_model, 'proj_out'):
-
+                    print("HERE 4")
                     proj_out_linear_layer_in_model = vace_block_module_in_model.proj_out

                     vace_lora_A_key = f"vace_blocks.{i}.proj_out.lora_A.weight"
@@ -4898,6 +4901,7 @@ def _maybe_expand_t2v_lora_for_vace(

                     # Use 'current_lora_has_bias' to decide on padding bias for VACE blocks
                     if current_lora_has_bias and proj_out_linear_layer_in_model.bias is not None:
+                        print("HERE 5")
                         vace_lora_B_bias_key = f"vace_blocks.{i}.proj_out.lora_B.bias"
                         if vace_lora_B_bias_key not in state_dict:
                             state_dict[vace_lora_B_bias_key] = torch.zeros_like(
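
The helper being instrumented here, `_maybe_expand_t2v_lora_for_vace`, pads a text-to-video (T2V) LoRA state dict with zero-filled entries for the VACE blocks' `proj_out` layers so the LoRA's keys line up with a VACE transformer. The sketch below illustrates only that zero-padding idea; it is not the diffusers implementation, and the function name `pad_t2v_lora_for_vace`, its signature, and the assumption that `proj_out` is a `torch.nn.Linear` are illustrative assumptions.

# Minimal sketch of the zero-padding idea, assuming `transformer.vace_blocks[i].proj_out`
# is a torch.nn.Linear and `state_dict` holds LoRA weights keyed as in the diff above.
# The helper name and signature are hypothetical, for illustration only.
import torch


def pad_t2v_lora_for_vace(transformer, state_dict, rank, dtype, has_bias):
    for i, block in enumerate(transformer.vace_blocks):
        proj_out = block.proj_out
        a_key = f"vace_blocks.{i}.proj_out.lora_A.weight"
        b_key = f"vace_blocks.{i}.proj_out.lora_B.weight"
        if a_key not in state_dict:
            # lora_A: (rank, in_features); zeros keep the LoRA delta at zero for this block
            state_dict[a_key] = torch.zeros(rank, proj_out.in_features, dtype=dtype)
        if b_key not in state_dict:
            # lora_B: (out_features, rank)
            state_dict[b_key] = torch.zeros(proj_out.out_features, rank, dtype=dtype)
        if has_bias and proj_out.bias is not None:
            bias_key = f"vace_blocks.{i}.proj_out.lora_B.bias"
            if bias_key not in state_dict:
                state_dict[bias_key] = torch.zeros_like(proj_out.bias, dtype=dtype)
    return state_dict

Because the padded lora_A/lora_B weights are all zeros, the added entries leave the VACE blocks' outputs unchanged while keeping the state-dict keys consistent with the model.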
