Merged
1 change: 1 addition & 0 deletions .gitignore
@@ -102,6 +102,7 @@ venv/
 ENV/
 env.bak/
 venv.bak/
+myenv/
Member: Should be removed.

Contributor Author: Done

 # Spyder project settings
 .spyderproject
5 changes: 4 additions & 1 deletion src/diffusers/models/transformers/transformer_sd3.py
@@ -345,7 +345,10 @@ def custom_forward(*inputs):
             # controlnet residual
             if block_controlnet_hidden_states is not None and block.context_pre_only is False:
                 interval_control = len(self.transformer_blocks) // len(block_controlnet_hidden_states)
-                hidden_states = hidden_states + block_controlnet_hidden_states[index_block // interval_control]
+                hidden_states_layer_index = index_block // interval_control
+                if hidden_states_layer_index >= len(block_controlnet_hidden_states):
+                    hidden_states_layer_index = len(block_controlnet_hidden_states) - 1
+                hidden_states = hidden_states + block_controlnet_hidden_states[hidden_states_layer_index]

         hidden_states = self.norm_out(hidden_states, temb)
         hidden_states = self.proj_out(hidden_states)
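For context, a minimal standalone sketch (illustrative numbers, not part of the diff) of the off-by-one this hunk guards against: when the transformer depth is not an exact multiple of the number of controlnet residuals, floor division can push the index one past the end of the list.

# Standalone sketch of the indexing bug the clamp above fixes: with 3
# transformer blocks and 2 controlnet residuals, interval_control = 3 // 2 = 1,
# so index_block = 2 maps to 2 // 1 = 2, one past the last valid index (1).
num_transformer_blocks = 3
block_controlnet_hidden_states = ["residual_0", "residual_1"]  # stand-ins for tensors

interval_control = num_transformer_blocks // len(block_controlnet_hidden_states)  # 1
for index_block in range(num_transformer_blocks):
    hidden_states_layer_index = index_block // interval_control
    if hidden_states_layer_index >= len(block_controlnet_hidden_states):
        hidden_states_layer_index = len(block_controlnet_hidden_states) - 1  # clamp to last residual
    print(index_block, "->", block_controlnet_hidden_states[hidden_states_layer_index])

Running this prints 0 -> residual_0, 1 -> residual_1, 2 -> residual_1; without the clamp, the last iteration would raise an IndexError.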
2 changes: 1 addition & 1 deletion tests/pipelines/controlnet_sd3/test_controlnet_sd3.py
@@ -77,7 +77,7 @@ def get_dummy_components(self):
             sample_size=32,
             patch_size=1,
             in_channels=8,
-            num_layers=1,
+            num_layers=3,
Member: We shouldn't change this value here, I think. Instead, we could make this method accept an argument like num_controlnet_layers and then leverage it as needed. WDYT? (See the sketch after this hunk.)

Contributor Author: Done
             attention_head_dim=8,
             num_attention_heads=4,
             joint_attention_dim=32,
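A rough sketch of the reviewer's suggestion above. The signature, default value, and trimmed config dict are assumptions for illustration; the real get_dummy_components builds the full set of pipeline components.

# Hypothetical refactor sketch: the dummy-component helper takes the layer
# count as an argument instead of hard-coding num_layers, so tests can
# exercise mismatched transformer/controlnet depths.
def get_dummy_components(num_controlnet_layers: int = 3):
    transformer_config = dict(
        sample_size=32,
        patch_size=1,
        in_channels=8,
        num_layers=num_controlnet_layers,  # previously the hard-coded value
        attention_head_dim=8,
        num_attention_heads=4,
        joint_attention_dim=32,
    )
    return transformer_config

# Example: a test requesting a specific depth.
assert get_dummy_components(num_controlnet_layers=3)["num_layers"] == 3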