
Commit 62009b5

Merge branch 'main' into flux_multi_ipa
2 parents: 905e8d7 + 36517f6

1 file changed


src/diffusers/models/attention_processor.py

Lines changed: 3 additions & 1 deletion
@@ -213,7 +213,9 @@ def __init__(
             self.norm_q = LpNorm(p=2, dim=-1, eps=eps)
             self.norm_k = LpNorm(p=2, dim=-1, eps=eps)
         else:
-            raise ValueError(f"unknown qk_norm: {qk_norm}. Should be None,'layer_norm','fp32_layer_norm','rms_norm'")
+            raise ValueError(
+                f"unknown qk_norm: {qk_norm}. Should be one of None, 'layer_norm', 'fp32_layer_norm', 'layer_norm_across_heads', 'rms_norm', 'rms_norm_across_heads', 'l2'."
+            )

         if cross_attention_norm is None:
             self.norm_cross = None
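
For context, a minimal sketch of how the expanded error message surfaces. This assumes diffusers' Attention constructor accepts a qk_norm argument as shown in the hunk above; the invalid value "bad_norm" is hypothetical, chosen only to trigger the else branch:

    # Minimal sketch, assuming Attention takes qk_norm as in the hunk above;
    # "bad_norm" is a hypothetical value outside the supported set.
    from diffusers.models.attention_processor import Attention

    try:
        # Any unrecognized string falls through to the else branch in __init__.
        Attention(query_dim=64, qk_norm="bad_norm")
    except ValueError as err:
        print(err)
        # unknown qk_norm: bad_norm. Should be one of None, 'layer_norm',
        # 'fp32_layer_norm', 'layer_norm_across_heads', 'rms_norm',
        # 'rms_norm_across_heads', 'l2'.

The change itself only wraps the raise across multiple lines and lists every value the if/elif chain actually handles, so the message stays in sync with the supported options.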
