From aaddaf50b5e6e3681f234c26d02b3bc034737815 Mon Sep 17 00:00:00 2001
From: Samuel Tesfai
Date: Mon, 4 Aug 2025 13:28:28 +0800
Subject: [PATCH 1/2] Cross attention module to Wan Attention

---
 src/diffusers/models/transformers/transformer_wan.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/diffusers/models/transformers/transformer_wan.py b/src/diffusers/models/transformers/transformer_wan.py
index 8a18ea5f3e2a..19b37c5a1173 100644
--- a/src/diffusers/models/transformers/transformer_wan.py
+++ b/src/diffusers/models/transformers/transformer_wan.py
@@ -180,6 +180,7 @@ def __init__(
         added_kv_proj_dim: Optional[int] = None,
         cross_attention_dim_head: Optional[int] = None,
         processor=None,
+        is_cross_attention=None
     ):
         super().__init__()
 
@@ -207,6 +208,8 @@ def __init__(
             self.add_v_proj = torch.nn.Linear(added_kv_proj_dim, self.inner_dim, bias=True)
             self.norm_added_k = torch.nn.RMSNorm(dim_head * heads, eps=eps)
 
+        self.is_cross_attention = cross_attention_dim_head is not None
+
         self.set_processor(processor)
 
     def fuse_projections(self):

From 97a934bdb270a439731c3c4b3bb1916d2adce8a4 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
Date: Mon, 4 Aug 2025 10:47:01 +0000
Subject: [PATCH 2/2] Apply style fixes

---
 src/diffusers/models/transformers/transformer_wan.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/diffusers/models/transformers/transformer_wan.py b/src/diffusers/models/transformers/transformer_wan.py
index 19b37c5a1173..42b482b6f163 100644
--- a/src/diffusers/models/transformers/transformer_wan.py
+++ b/src/diffusers/models/transformers/transformer_wan.py
@@ -180,7 +180,7 @@ def __init__(
         added_kv_proj_dim: Optional[int] = None,
         cross_attention_dim_head: Optional[int] = None,
         processor=None,
-        is_cross_attention=None
+        is_cross_attention=None,
     ):
         super().__init__()
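
Note: the attribute added by PATCH 1/2 is derived from cross_attention_dim_head; the new is_cross_attention constructor argument itself is not read in these hunks. The sketch below is a hypothetical illustration, not the actual diffusers processor: it shows how an attention processor could branch on the new attn.is_cross_attention flag to choose the key/value source. StubWanAttention and ExampleProcessor are stand-in names invented for this sketch.

import torch
import torch.nn.functional as F

class StubWanAttention(torch.nn.Module):
    """Minimal stand-in exposing only the fields this sketch needs from WanAttention."""
    def __init__(self, dim=64, heads=8, cross_attention_dim_head=None):
        super().__init__()
        self.heads = heads
        self.to_q = torch.nn.Linear(dim, dim)
        self.to_k = torch.nn.Linear(dim, dim)
        self.to_v = torch.nn.Linear(dim, dim)
        # Mirrors the line added by this patch:
        self.is_cross_attention = cross_attention_dim_head is not None

class ExampleProcessor:
    def __call__(self, attn, hidden_states, encoder_hidden_states=None):
        # Keys/values come from the encoder states for cross attention,
        # and from the hidden states themselves for self-attention.
        kv = encoder_hidden_states if attn.is_cross_attention else hidden_states
        q = attn.to_q(hidden_states).unflatten(2, (attn.heads, -1)).transpose(1, 2)
        k = attn.to_k(kv).unflatten(2, (attn.heads, -1)).transpose(1, 2)
        v = attn.to_v(kv).unflatten(2, (attn.heads, -1)).transpose(1, 2)
        out = F.scaled_dot_product_attention(q, k, v)
        return out.transpose(1, 2).flatten(2)

# Usage: with cross_attention_dim_head set, the flag is True and keys/values
# are taken from the encoder states rather than the video latents.
attn = StubWanAttention(cross_attention_dim_head=8)
x = torch.randn(1, 16, 64)    # video latents
ctx = torch.randn(1, 4, 64)   # encoder (text) states
y = ExampleProcessor()(attn, x, ctx)  # -> shape (1, 16, 64)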