We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent c1e5ce7 · commit 44d4487 (Copy full SHA for 44d4487)
ai_edge_torch/generative/layers/scaled_dot_product_attention.py
@@ -65,7 +65,7 @@ def scaled_dot_product_attention(
65
scale=scale,
66
)
67
else:
68
- q.mul_(scale)
+ q = q * scale
69
scores = q @ k.transpose(-1, -2)
70
scores = scores / softcap
71
scores = torch.tanh(scores)
@@ -130,7 +130,7 @@ def scaled_dot_product_attention_with_hlfb(
130
131
132
133
134
135
136
0 commit comments