Commit 3add55a

address #73
1 parent dea7ba1

2 files changed: +4 −3 lines

alphafold3_pytorch/attention.py

Lines changed: 3 additions & 2 deletions
@@ -178,7 +178,8 @@ def __init__(
         window_size = None,
         num_memory_kv: int = 0,
         enable_attn_softclamp = False,
-        attn_softclamp_value = 50.
+        attn_softclamp_value = 50.,
+        init_gate_bias = -2.
     ):
         super().__init__()
         """
@@ -224,7 +225,7 @@ def __init__(
         if gate_output:
             gate_linear = nn.Linear(dim, dim_inner)
             nn.init.zeros_(gate_linear.weight)
-            nn.init.constant_(gate_linear.bias, 1.)
+            nn.init.constant_(gate_linear.bias, init_gate_bias)

             self.to_gates = gate_linear

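The substantive change: the attention output-gate bias initialization moves from a hard-coded 1. to a configurable init_gate_bias defaulting to -2. A minimal sketch of the effect, assuming the usual sigmoid-gated output pattern sigmoid(Linear(x)) * out; the dims below are illustrative, not taken from the repo:

import torch
import torch.nn as nn

dim = dim_inner = 64
init_gate_bias = -2.

gate_linear = nn.Linear(dim, dim_inner)
nn.init.zeros_(gate_linear.weight)                   # zero weights: the bias alone sets the initial gate value
nn.init.constant_(gate_linear.bias, init_gate_bias)  # the line this commit makes configurable

x = torch.randn(2, 16, dim)
gates = gate_linear(x).sigmoid()

print(gates.mean())  # ~0.12: sigmoid(-2.) starts the gates mostly closed,
                     # versus ~0.73 with the previous hard-coded bias of 1.

With zero-initialized weights, every gate starts at exactly sigmoid(init_gate_bias), so the default of -2. lets the gated branch open up gradually during training instead of passing most of the signal through from the first step.
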
pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [project]
 name = "alphafold3-pytorch"
-version = "0.1.114"
+version = "0.1.115"
 description = "Alphafold 3 - Pytorch"
 authors = [
     { name = "Phil Wang", email = "[email protected]" }
