diff --git a/src/diffusers/models/attention_processor.py b/src/diffusers/models/attention_processor.py
index 8bba5a82bc2f..884f4a6ad67d 100644
--- a/src/diffusers/models/attention_processor.py
+++ b/src/diffusers/models/attention_processor.py
@@ -213,7 +213,9 @@ def __init__(
             self.norm_q = LpNorm(p=2, dim=-1, eps=eps)
             self.norm_k = LpNorm(p=2, dim=-1, eps=eps)
         else:
-            raise ValueError(f"unknown qk_norm: {qk_norm}. Should be None,'layer_norm','fp32_layer_norm','rms_norm'")
+            raise ValueError(
+                f"unknown qk_norm: {qk_norm}. Should be one of None, 'layer_norm', 'fp32_layer_norm', 'layer_norm_across_heads', 'rms_norm', 'rms_norm_across_heads', 'l2'."
+            )
 
         if cross_attention_norm is None:
             self.norm_cross = None
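
For context, a minimal sketch of how the updated message would surface, assuming this hunk sits in `Attention.__init__` of `attention_processor.py` and that the constructor keyword arguments shown below are representative rather than exhaustive:

```python
# Sketch (assumed usage): an unsupported qk_norm should now raise the
# expanded error message listing every accepted value.
from diffusers.models.attention_processor import Attention

# Accepted values per the new message: None, 'layer_norm', 'fp32_layer_norm',
# 'layer_norm_across_heads', 'rms_norm', 'rms_norm_across_heads', 'l2'.
attn = Attention(query_dim=64, heads=8, dim_head=64, qk_norm="rms_norm")

try:
    # Hypothetical invalid value to trigger the else branch above.
    Attention(query_dim=64, heads=8, dim_head=64, qk_norm="bad_norm")
except ValueError as err:
    print(err)  # unknown qk_norm: bad_norm. Should be one of None, 'layer_norm', ...
```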