
Commit 1e6b1c5

remove rmsnorm assert
1 parent a73cb39 commit 1e6b1c5

2 files changed: 0 additions & 6 deletions

2 files changed

+0
-6
lines changed
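In summary, this commit removes the `qk_norm` constructor argument from the Flux attention module, along with the `assert qk_norm == "rms_norm"` guard in its `__init__`, and updates the Chroma and Flux transformer blocks so they no longer pass `qk_norm` when constructing attention.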

src/diffusers/models/transformers/transformer_chroma.py

Lines changed: 0 additions & 2 deletions
```diff
@@ -235,7 +235,6 @@ def __init__(
             out_dim=dim,
             bias=True,
             processor=processor,
-            qk_norm="rms_norm",
             eps=1e-6,
             pre_only=True,
         )
@@ -296,7 +295,6 @@ def __init__(
             context_pre_only=False,
             bias=True,
             processor=FluxAttnProcessor(),
-            qk_norm=qk_norm,
             eps=eps,
         )
 
```
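Both hunks drop the `qk_norm` keyword from attention constructor calls in the Chroma blocks; the `eps=1e-6` / `eps=eps` arguments that parameterize the normalization stay in place.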

src/diffusers/models/transformers/transformer_flux.py

Lines changed: 0 additions & 4 deletions
```diff
@@ -277,7 +277,6 @@ def __init__(
         dim_head: int = 64,
         dropout: float = 0.0,
         bias: bool = False,
-        qk_norm: Optional[str] = None,
         added_kv_proj_dim: Optional[int] = None,
         added_proj_bias: Optional[bool] = True,
         out_bias: bool = True,
@@ -289,7 +288,6 @@ def __init__(
         processor=None,
     ):
         super().__init__()
-        assert qk_norm == "rms_norm", "Flux uses RMSNorm"
 
         self.head_dim = dim_head
         self.inner_dim = out_dim if out_dim is not None else dim_head * heads
@@ -375,7 +373,6 @@ def __init__(self, dim: int, num_attention_heads: int, attention_head_dim: int,
             out_dim=dim,
             bias=True,
             processor=processor,
-            qk_norm="rms_norm",
             eps=1e-6,
             pre_only=True,
         )
@@ -431,7 +428,6 @@ def __init__(
             context_pre_only=False,
             bias=True,
             processor=FluxAttnProcessor(),
-            qk_norm=qk_norm,
             eps=eps,
         )
 
```
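For context, the message on the removed assertion ("Flux uses RMSNorm") refers to RMS-normalizing the per-head query and key projections before attention scores are computed. Below is a minimal, self-contained sketch of that technique; it is an illustration under assumed names and shapes (`rms_norm`, `head_dim = 64`, dummy tensors), not the diffusers implementation.

```python
import torch


def rms_norm(x: torch.Tensor, weight: torch.Tensor, eps: float = 1e-6) -> torch.Tensor:
    # RMSNorm over the last (head) dimension: x / sqrt(mean(x^2) + eps), then a learned scale.
    variance = x.float().pow(2).mean(dim=-1, keepdim=True)
    return (x.float() * torch.rsqrt(variance + eps)).to(x.dtype) * weight


# Hypothetical tensors with shape (batch, heads, seq_len, head_dim).
head_dim = 64
q = torch.randn(1, 8, 16, head_dim)
k = torch.randn(1, 8, 16, head_dim)
q_scale = torch.ones(head_dim)  # learnable parameters in a real module
k_scale = torch.ones(head_dim)

# Normalize q and k per head before the usual scaled dot-product attention.
q = rms_norm(q, q_scale, eps=1e-6)
k = rms_norm(k, k_scale, eps=1e-6)
attn_scores = (q @ k.transpose(-2, -1)) / head_dim**0.5
```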
