We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 3cb66e8 commit c87575d (Copy full SHA for c87575d)
src/diffusers/models/transformers/transformer_flux.py
@@ -360,7 +360,6 @@ def __init__(
360
self.norm1 = AdaLayerNormZero(dim)
361
self.norm1_context = AdaLayerNormZero(dim)
362
363
- # Use specialized FluxAttention instead of generic Attention
364
self.attn = FluxAttention(
365
query_dim=dim,
366
cross_attention_dim=None,
0 commit comments