Skip to content

Commit b6a8bb1

Browse files
authored
Fix merge
1 parent 91bb35b commit b6a8bb1

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

modules/hypernetworks/hypernetwork.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@ def __init__(self, dim, state_dict=None, layer_structure=None, activation_func=N
6060
linears.append(torch.nn.LayerNorm(int(dim * layer_structure[i+1])))
6161

6262
# Add dropout except last layer
63-
if use_dropout and i < len(layer_structure) - 2:
63+
if use_dropout and i < len(layer_structure) - 3:
6464
linears.append(torch.nn.Dropout(p=0.3))
6565

6666
self.linear = torch.nn.Sequential(*linears)
@@ -126,7 +126,7 @@ class Hypernetwork:
126126
filename = None
127127
name = None
128128

129-
def __init__(self, name=None, enable_sizes=None, layer_structure=None, activation_func=None, weight_init=None, add_layer_norm=False, use_dropout=False, activate_output=False)
129+
def __init__(self, name=None, enable_sizes=None, layer_structure=None, activation_func=None, weight_init=None, add_layer_norm=False, use_dropout=False, activate_output=False):
130130
self.filename = None
131131
self.name = name
132132
self.layers = {}

0 commit comments

Comments (0)