We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 3516159 · commit 9059b37 — Copy full SHA for 9059b37
src/diffusers/models/transformers/transformer_flux.py
@@ -493,7 +493,7 @@ def forward(
493
494
if joint_attention_kwargs is not None and "ip_adapter_image_embeds" in joint_attention_kwargs:
495
ip_adapter_image_embeds = joint_attention_kwargs.pop("ip_adapter_image_embeds")
496
- ip_hidden_states = self.transformer.encoder_hid_proj(ip_adapter_image_embeds)
+ ip_hidden_states = self.encoder_hid_proj(ip_adapter_image_embeds)
497
joint_attention_kwargs.update({"ip_hidden_states": ip_hidden_states})
498
499
for index_block, block in enumerate(self.transformer_blocks):
0 commit comments