We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 9a48fdf commit 381452e (Copy full SHA for 381452e)
chebai/models/ffn.py
@@ -20,10 +20,15 @@ def __init__(
20
**kwargs
21
):
22
super().__init__(**kwargs)
23
+ self.input_size = input_size
24
+ self.hidden_layers = hidden_layers
25
+
26
+ def setup(self, stage: str) -> None:
27
+ super().setup(stage)
28
29
layers = []
- current_layer_input_size = input_size
- for hidden_dim in hidden_layers:
30
+ current_layer_input_size = self.input_size
31
+ for hidden_dim in self.hidden_layers:
32
layers.append(MLPBlock(current_layer_input_size, hidden_dim))
33
layers.append(Residual(MLPBlock(hidden_dim, hidden_dim)))
34
current_layer_input_size = hidden_dim
0 commit comments