Skip to content

Commit 996e48d

Browse files
committed
Update entropy_decoding.py
remove second softmax
1 parent 26122d5 commit 996e48d

File tree

1 file changed

+2
-1
lines changed

1 file changed

+2
-1
lines changed

optillm/entropy_decoding.py

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -27,7 +27,8 @@ def calculate_varentropy_logsoftmax(logits: torch.Tensor, axis: int = -1) -> Tup
2727
return entropy, varentropy
2828

2929
def calculate_attention_metrics(attention_scores: torch.Tensor) -> Dict[str, torch.Tensor]:
30-
attention_probs = F.softmax(attention_scores, dim=-1)
30+
# attention_probs = F.softmax(attention_scores, dim=-1)
31+
attention_probs = attention_scores
3132
attn_entropy = -torch.sum(attention_probs * torch.log2(torch.clamp(attention_probs, 1e-10, 1.0)), dim=-1)
3233
attn_varentropy = torch.var(attn_entropy, dim=-1)
3334

0 commit comments

Comments (0)