From e161708f4dd12425499b1e440d5ef3ae97376d7d Mon Sep 17 00:00:00 2001
From: yiyixuxu
Date: Thu, 20 Mar 2025 18:03:21 +0100
Subject: [PATCH] up

---
 src/diffusers/models/normalization.py | 10 ----------
 1 file changed, 10 deletions(-)

diff --git a/src/diffusers/models/normalization.py b/src/diffusers/models/normalization.py
index 383388ca543f..962ce435bdb7 100644
--- a/src/diffusers/models/normalization.py
+++ b/src/diffusers/models/normalization.py
@@ -550,16 +550,6 @@ def forward(self, hidden_states):
             hidden_states = torch_npu.npu_rms_norm(hidden_states, self.weight, epsilon=self.eps)[0]
             if self.bias is not None:
                 hidden_states = hidden_states + self.bias
-        elif is_torch_version(">=", "2.4"):
-            if self.weight is not None:
-                # convert into half-precision if necessary
-                if self.weight.dtype in [torch.float16, torch.bfloat16]:
-                    hidden_states = hidden_states.to(self.weight.dtype)
-            hidden_states = nn.functional.rms_norm(
-                hidden_states, normalized_shape=(hidden_states.shape[-1],), weight=self.weight, eps=self.eps
-            )
-            if self.bias is not None:
-                hidden_states = hidden_states + self.bias
         else:
             input_dtype = hidden_states.dtype
             variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)