Commit 6dd7ff6

武嘉涵 authored and committed
re-add configs as normal files (no LFS)
1 parent bbafd53 commit 6dd7ff6

1 file changed

src/diffusers/models/normalization.py

Lines changed: 2 additions & 3 deletions
@@ -333,10 +333,9 @@ def __init__(
         eps=1e-5,
         bias=True,
         norm_type="layer_norm",
-        use_silu: bool = True,
     ):
         super().__init__()
-        self.act = nn.SiLU() if use_silu else nn.Identity()
+        self.silu = nn.SiLU()
         self.linear = nn.Linear(conditioning_embedding_dim, embedding_dim * 2, bias=bias)
         if norm_type == "layer_norm":
             self.norm = LayerNorm(embedding_dim, eps, elementwise_affine, bias)
@@ -347,7 +346,7 @@ def __init__(

     def forward(self, x: torch.Tensor, conditioning_embedding: torch.Tensor) -> torch.Tensor:
         # convert back to the original dtype in case `conditioning_embedding`` is upcasted to float32 (needed for hunyuanDiT)
-        emb = self.linear(self.act(conditioning_embedding).to(x.dtype))
+        emb = self.linear(self.silu(conditioning_embedding).to(x.dtype))
         scale, shift = torch.chunk(emb, 2, dim=1)
         x = self.norm(x) * (1 + scale)[:, None, :] + shift[:, None, :]
         return x
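
To make the effect of the change concrete, here is a minimal, self-contained sketch of how the module reads after this commit: SiLU is now applied unconditionally to the conditioning embedding, which is projected to a concatenated (scale, shift) pair that modulates the layer-normalized input. The class name AdaNormSketch is hypothetical, plain nn.LayerNorm stands in for the file's own LayerNorm wrapper, and the additional norm_type handling is omitted; this is an illustration of the pattern in the diff, not the actual diffusers class.

import torch
import torch.nn as nn


class AdaNormSketch(nn.Module):
    # Hypothetical stand-in for the adaptive layer-norm module touched by this commit.

    def __init__(self, embedding_dim: int, conditioning_embedding_dim: int,
                 elementwise_affine: bool = True, eps: float = 1e-5, bias: bool = True):
        super().__init__()
        # After this commit the activation is always SiLU; the removed
        # `use_silu` flag (and its nn.Identity() fallback) no longer exists.
        self.silu = nn.SiLU()
        # Projects the conditioning embedding to a concatenated (scale, shift) pair.
        self.linear = nn.Linear(conditioning_embedding_dim, embedding_dim * 2, bias=bias)
        # The real file constructs its own LayerNorm wrapper based on norm_type.
        self.norm = nn.LayerNorm(embedding_dim, eps=eps, elementwise_affine=elementwise_affine)

    def forward(self, x: torch.Tensor, conditioning_embedding: torch.Tensor) -> torch.Tensor:
        # Cast back to x's dtype in case the conditioning embedding was upcast to float32.
        emb = self.linear(self.silu(conditioning_embedding).to(x.dtype))
        scale, shift = torch.chunk(emb, 2, dim=1)
        # Scale and shift broadcast over the sequence dimension.
        return self.norm(x) * (1 + scale)[:, None, :] + shift[:, None, :]


# Shape check: (batch, seq, dim) input, (batch, cond_dim) conditioning.
m = AdaNormSketch(embedding_dim=64, conditioning_embedding_dim=32)
out = m(torch.randn(2, 10, 64), torch.randn(2, 32))
assert out.shape == (2, 10, 64)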
