We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 6bf67e7 · commit b32bf00 — Copy full SHA for b32bf00
src/diffusers/models/transformers/sana_transformer.py
@@ -250,11 +250,7 @@ def __init__(
250
inner_dim = num_attention_heads * attention_head_dim
251
252
# 1. Patch Embedding
253
- interpolation_scale = (
254
- interpolation_scale
255
- if interpolation_scale is not None
256
- else max(sample_size // 64, 1)
257
- )
+ interpolation_scale = interpolation_scale if interpolation_scale is not None else max(sample_size // 64, 1)
258
self.patch_embed = PatchEmbed(
259
height=sample_size,
260
width=sample_size,
0 commit comments