1 parent d38f69e · commit 1066fe4
src/diffusers/models/attention_processor.py
@@ -539,7 +539,10 @@ def forward(
         # For standard processors that are defined here, `**cross_attention_kwargs` is empty

         attn_parameters = set(inspect.signature(self.processor.__call__).parameters.keys())
-        unused_kwargs = [k for k, _ in cross_attention_kwargs.items() if k not in attn_parameters]
+        quiet_attn_parameters = {"ip_adapter_masks"}
+        unused_kwargs = [
+            k for k, _ in cross_attention_kwargs.items() if k not in attn_parameters and k not in quiet_attn_parameters
+        ]
         if len(unused_kwargs) > 0:
             logger.warning(
                 f"cross_attention_kwargs {unused_kwargs} are not expected by {self.processor.__class__.__name__} and will be ignored."