
Commit 5b0a88b

hlky and yiyixuxu authored
Apply suggestions from code review
Co-authored-by: YiYi Xu <[email protected]>
1 parent 2537016 commit 5b0a88b

File tree

1 file changed (+2, -4 lines)


src/diffusers/models/attention_processor.py

Lines changed: 2 additions & 4 deletions
@@ -575,7 +575,7 @@ def forward(
         # For standard processors that are defined here, `**cross_attention_kwargs` is empty

         attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys())
-        quiet_attn_parameters = {"ip_adapter_masks", "image_projection"}
+        quiet_attn_parameters = {"ip_adapter_masks", "ip_hidden_states"}
         unused_kwargs = [
             k for k, _ in cross_attention_kwargs.items() if k not in attn_parameters and k not in quiet_attn_parameters
         ]
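For context, the check in this hunk warns about cross-attention kwargs the processor cannot accept, while names listed in `quiet_attn_parameters` are dropped silently; the commit updates that allow-list for the renamed kwarg. A minimal, self-contained sketch of the behavior (the `example_processor` and `warn_unused_kwargs` names are hypothetical, not diffusers' actual code):

```python
# Sketch of the unused-kwargs check above: kwargs accepted by the processor's
# signature are forwarded; names in `quiet_attn_parameters` are dropped silently;
# anything else triggers a warning.
import inspect
import logging

logger = logging.getLogger(__name__)


def example_processor(hidden_states, scale=1.0, ip_hidden_states=None):
    # Stand-in processor: `ip_adapter_masks` is deliberately absent from the
    # signature, so it only passes the check via `quiet_attn_parameters`.
    return hidden_states


def warn_unused_kwargs(processor, cross_attention_kwargs):
    attn_parameters = set(inspect.signature(processor).parameters.keys())
    # After this commit, `ip_hidden_states` (formerly `image_projection`) is quiet.
    quiet_attn_parameters = {"ip_adapter_masks", "ip_hidden_states"}
    unused_kwargs = [
        k for k in cross_attention_kwargs if k not in attn_parameters and k not in quiet_attn_parameters
    ]
    if unused_kwargs:
        logger.warning(f"kwargs {unused_kwargs} are not expected and will be ignored.")
    return {k: v for k, v in cross_attention_kwargs.items() if k in attn_parameters}


# `scale` is forwarded, `ip_adapter_masks` is silently dropped, `foo` is warned about.
warn_unused_kwargs(example_processor, {"scale": 0.5, "ip_adapter_masks": None, "foo": 1})
```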
@@ -2698,7 +2698,7 @@ def __call__(
         encoder_hidden_states: torch.FloatTensor = None,
         attention_mask: Optional[torch.FloatTensor] = None,
         image_rotary_emb: Optional[torch.Tensor] = None,
-        image_projection: Optional[List[torch.Tensor]] = None,
+        ip_hidden_states: Optional[List[torch.Tensor]] = None,
         ip_adapter_masks: Optional[torch.Tensor] = None,
     ) -> torch.FloatTensor:
         batch_size, _, _ = hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape
@@ -2770,8 +2770,6 @@ def __call__(
             encoder_hidden_states = attn.to_add_out(encoder_hidden_states)

         # IP-adapter
-        ip_hidden_states = image_projection
-
         ip_query = hidden_states_query_proj
         ip_attn_output = None
         # for ip-adapter
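The last two hunks are one rename: the `__call__` kwarg becomes `ip_hidden_states`, matching the variable the processor body already uses, so the `ip_hidden_states = image_projection` alias can go. A caller-side sketch of the effect, assuming the post-commit signature (the `ip_adapter_processor` name and the placeholder mixing step are illustrative only, not the real IP-adapter attention computation):

```python
from typing import List, Optional

import torch


def ip_adapter_processor(
    hidden_states: torch.Tensor,
    ip_hidden_states: Optional[List[torch.Tensor]] = None,  # renamed from `image_projection`
) -> torch.Tensor:
    if ip_hidden_states is not None:
        # The kwarg is consumed directly; no `ip_hidden_states = image_projection` alias.
        for ip_state in ip_hidden_states:
            hidden_states = hidden_states + ip_state.mean() * 0.0  # placeholder for IP attention mixing
    return hidden_states


# Callers passing IP-adapter image embeddings through cross-attention kwargs
# must now use the new name:
out = ip_adapter_processor(
    torch.randn(1, 4, 8),
    ip_hidden_states=[torch.randn(1, 2, 8)],
)
```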
