
Commit c702d4d

Fix off-by-one
1 parent 2f4c918 commit c702d4d

File tree: 1 file changed (+2 −2 lines)

modules/hypernetworks/hypernetwork.py

Lines changed: 2 additions & 2 deletions
@@ -42,7 +42,7 @@ def __init__(self, dim, state_dict=None, layer_structure=None, activation_func=N
             linears.append(torch.nn.Linear(int(dim * layer_structure[i]), int(dim * layer_structure[i+1])))

             # Add an activation func except last layer
-            if activation_func == "linear" or activation_func is None or i >= len(layer_structure) - 3:
+            if activation_func == "linear" or activation_func is None or i >= len(layer_structure) - 2:
                 pass
             elif activation_func in self.activation_dict:
                 linears.append(self.activation_dict[activation_func]())
@@ -54,7 +54,7 @@ def __init__(self, dim, state_dict=None, layer_structure=None, activation_func=N
                 linears.append(torch.nn.LayerNorm(int(dim * layer_structure[i+1])))

             # Add dropout except last layer
-            if use_dropout and i < len(layer_structure) - 3:
+            if use_dropout and i < len(layer_structure) - 2:
                 linears.append(torch.nn.Dropout(p=0.3))

         self.linear = torch.nn.Sequential(*linears)
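Why the bound changed: the surrounding loop iterates i from 0 to len(layer_structure) - 2, appending one Linear per consecutive pair of entries, so the final Linear is built at i == len(layer_structure) - 2. The old "- 3" bound therefore also suppressed the activation (and dropout) after the second-to-last Linear, not just the last one. Below is a minimal standalone sketch of the corrected boundary; the example layer_structure value and the print are illustrative, not part of the module.

# Sketch only: shows which iterations get an activation/dropout under the fix.
layer_structure = [1, 2, 1]  # assumed example; any list of width multipliers works
for i in range(len(layer_structure) - 1):
    # The last Linear is appended at i == len(layer_structure) - 2, so only
    # that iteration should skip the activation and dropout.
    is_last_layer = i >= len(layer_structure) - 2
    print(f"layer {i}: add activation/dropout = {not is_last_layer}")

With the old "- 3" bound and this three-entry structure, the condition i >= 0 would be true on every iteration, so no layer would ever receive an activation.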

0 commit comments
