From 98cda213c6a09ba332673f3ca4db05d871aa1784 Mon Sep 17 00:00:00 2001
From: junsong
Date: Thu, 2 Jan 2025 07:52:17 -0800
Subject: [PATCH] fix pe bug for Sana

---
 src/diffusers/models/transformers/sana_transformer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/diffusers/models/transformers/sana_transformer.py b/src/diffusers/models/transformers/sana_transformer.py
index 027ab5fecefd..bc3877627529 100644
--- a/src/diffusers/models/transformers/sana_transformer.py
+++ b/src/diffusers/models/transformers/sana_transformer.py
@@ -250,7 +250,6 @@ def __init__(
         inner_dim = num_attention_heads * attention_head_dim
 
         # 1. Patch Embedding
-        interpolation_scale = interpolation_scale if interpolation_scale is not None else max(sample_size // 64, 1)
         self.patch_embed = PatchEmbed(
             height=sample_size,
             width=sample_size,
@@ -258,6 +257,7 @@ def __init__(
             in_channels=in_channels,
             embed_dim=inner_dim,
             interpolation_scale=interpolation_scale,
+            pos_embed_type="sincos" if interpolation_scale is not None else None,
         )
 
         # 2. Additional condition embeddings
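
Note (editorial, not part of the patch): the removed line forced a default interpolation_scale of max(sample_size // 64, 1), so PatchEmbed was always constructed with its default pos_embed_type="sincos" and a sinusoidal positional embedding was added even when the config left interpolation_scale unset. With this change, interpolation_scale=None now disables the positional embedding entirely, which is consistent with Sana's design of omitting positional embeddings, while an explicitly set scale still opts into the sincos table.

Below is a minimal sketch of the effect, assuming a diffusers build that includes this commit. PatchEmbed and its keyword arguments are the real diffusers API; the tensor sizes are illustrative placeholders, not Sana's exact configuration.

import torch
from diffusers.models.embeddings import PatchEmbed

# With interpolation_scale=None, the fixed ternary passes pos_embed_type=None,
# so PatchEmbed builds no sincos table and forward() returns the bare patch
# projection. Sizes here are illustrative, not Sana's real config.
patch_embed = PatchEmbed(
    height=32,
    width=32,
    patch_size=1,
    in_channels=32,
    embed_dim=1152,
    interpolation_scale=None,
    pos_embed_type=None,  # what the fixed line selects when interpolation_scale is None
)

latent = torch.randn(1, 32, 32, 32)  # (batch, channels, height, width)
tokens = patch_embed(latent)         # Conv2d projection + flatten only
print(tokens.shape)                  # torch.Size([1, 1024, 1152]); no positional embedding added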