@@ -12,23 +12,17 @@ class SigmoidModel(nn.Module):
     -pytorch-and-make-your-life-simpler-ec5367895199
     """

-    # pyre-fixme[2]: Parameter must be annotated.
-    def __init__(self, num_in, num_hidden, num_out) -> None:
+    def __init__(self, num_in: int, num_hidden: int, num_out: int) -> None:
         super().__init__()
-        # pyre-fixme[4]: Attribute must be annotated.
         self.num_in = num_in
-        # pyre-fixme[4]: Attribute must be annotated.
         self.num_hidden = num_hidden
-        # pyre-fixme[4]: Attribute must be annotated.
         self.num_out = num_out
         self.lin1 = nn.Linear(num_in, num_hidden)
         self.lin2 = nn.Linear(num_hidden, num_out)
         self.relu1 = nn.ReLU()
         self.sigmoid = nn.Sigmoid()

-    # pyre-fixme[3]: Return type must be annotated.
-    # pyre-fixme[2]: Parameter must be annotated.
-    def forward(self, input):
+    def forward(self, input: torch.Tensor) -> torch.Tensor:
         lin1 = self.lin1(input)
         lin2 = self.lin2(self.relu1(lin1))
         return self.sigmoid(lin2)
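The net effect of this hunk: SigmoidModel's constructor and forward trade pyre-fixme suppressions for real annotations. A minimal usage sketch of the annotated model (the sizes and shapes here are illustrative, not from this commit):

    import torch

    model = SigmoidModel(num_in=10, num_hidden=20, num_out=3)
    x = torch.randn(4, 10)       # batch of 4 samples, 10 features each
    out = model(x)               # lin1 -> ReLU -> lin2 -> sigmoid
    assert out.shape == (4, 3)   # sigmoid preserves the (batch, num_out) shape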
@@ -40,14 +34,12 @@ class SoftmaxModel(nn.Module):
     https://adventuresinmachinelearning.com/pytorch-tutorial-deep-learning/
     """

-    # pyre-fixme[2]: Parameter must be annotated.
-    def __init__(self, num_in, num_hidden, num_out, inplace: bool = False) -> None:
+    def __init__(
+        self, num_in: int, num_hidden: int, num_out: int, inplace: bool = False
+    ) -> None:
         super().__init__()
-        # pyre-fixme[4]: Attribute must be annotated.
         self.num_in = num_in
-        # pyre-fixme[4]: Attribute must be annotated.
         self.num_hidden = num_hidden
-        # pyre-fixme[4]: Attribute must be annotated.
         self.num_out = num_out
         self.lin1 = nn.Linear(num_in, num_hidden)
         self.lin2 = nn.Linear(num_hidden, num_hidden)
@@ -56,9 +48,7 @@ def __init__(self, num_in, num_hidden, num_out, inplace: bool = False) -> None:
         self.relu2 = nn.ReLU(inplace=inplace)
         self.softmax = nn.Softmax(dim=1)

-    # pyre-fixme[3]: Return type must be annotated.
-    # pyre-fixme[2]: Parameter must be annotated.
-    def forward(self, input):
+    def forward(self, input: torch.Tensor) -> torch.Tensor:
         lin1 = self.relu1(self.lin1(input))
         lin2 = self.relu2(self.lin2(lin1))
         lin3 = self.lin3(lin2)
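SoftmaxModel forwards its inplace flag to both ReLU layers; inplace=True mutates the activation's input buffer, which is exactly the kind of behavior attribution tests need to exercise. A hedged sketch, assuming forward finishes with self.softmax(lin3) as the layer setup suggests:

    model = SoftmaxModel(num_in=8, num_hidden=16, num_out=4, inplace=False)
    x = torch.randn(2, 8)
    probs = model(x)                                          # nn.Softmax(dim=1) output
    assert torch.allclose(probs.sum(dim=1), torch.ones(2))    # each row sums to 1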
@@ -72,14 +62,10 @@ class SigmoidDeepLiftModel(nn.Module):
     -pytorch-and-make-your-life-simpler-ec5367895199
     """

-    # pyre-fixme[2]: Parameter must be annotated.
-    def __init__(self, num_in, num_hidden, num_out) -> None:
+    def __init__(self, num_in: int, num_hidden: int, num_out: int) -> None:
         super().__init__()
-        # pyre-fixme[4]: Attribute must be annotated.
         self.num_in = num_in
-        # pyre-fixme[4]: Attribute must be annotated.
         self.num_hidden = num_hidden
-        # pyre-fixme[4]: Attribute must be annotated.
         self.num_out = num_out
         self.lin1 = nn.Linear(num_in, num_hidden, bias=False)
         self.lin2 = nn.Linear(num_hidden, num_out, bias=False)
@@ -88,9 +74,7 @@ def __init__(self, num_in, num_hidden, num_out) -> None:
         self.relu1 = nn.ReLU()
         self.sigmoid = nn.Sigmoid()

-    # pyre-fixme[3]: Return type must be annotated.
-    # pyre-fixme[2]: Parameter must be annotated.
-    def forward(self, input):
+    def forward(self, input: torch.Tensor) -> torch.Tensor:
         lin1 = self.lin1(input)
         lin2 = self.lin2(self.relu1(lin1))
         return self.sigmoid(lin2)
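SigmoidDeepLiftModel differs from SigmoidModel only in its bias-free Linear layers, which keep hand-computed attribution targets simple. Assuming these classes are the DeepLift test fixtures their names suggest, a sketch of driving one through Captum's DeepLift (the captum.attr import is an assumption, not part of this diff):

    from captum.attr import DeepLift

    model = SigmoidDeepLiftModel(num_in=3, num_hidden=5, num_out=1)
    inputs = torch.randn(1, 3)
    baselines = torch.zeros(1, 3)        # reference point for DeepLift
    attributions = DeepLift(model).attribute(inputs, baselines, target=0)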
@@ -102,14 +86,10 @@ class SoftmaxDeepLiftModel(nn.Module):
     https://adventuresinmachinelearning.com/pytorch-tutorial-deep-learning/
     """

-    # pyre-fixme[2]: Parameter must be annotated.
-    def __init__(self, num_in, num_hidden, num_out) -> None:
+    def __init__(self, num_in: int, num_hidden: int, num_out: int) -> None:
         super().__init__()
-        # pyre-fixme[4]: Attribute must be annotated.
         self.num_in = num_in
-        # pyre-fixme[4]: Attribute must be annotated.
         self.num_hidden = num_hidden
-        # pyre-fixme[4]: Attribute must be annotated.
         self.num_out = num_out
         self.lin1 = nn.Linear(num_in, num_hidden)
         self.lin2 = nn.Linear(num_hidden, num_hidden)
@@ -121,9 +101,7 @@ def __init__(self, num_in, num_hidden, num_out) -> None:
         self.relu2 = nn.ReLU()
         self.softmax = nn.Softmax(dim=1)

-    # pyre-fixme[3]: Return type must be annotated.
-    # pyre-fixme[2]: Parameter must be annotated.
-    def forward(self, input):
+    def forward(self, input: torch.Tensor) -> torch.Tensor:
         lin1 = self.relu1(self.lin1(input))
         lin2 = self.relu2(self.lin2(lin1))
         lin3 = self.lin3(lin2)
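Every hunk in this commit follows the same pattern: delete a pyre-fixme suppression and supply the annotation it was masking. With typed signatures, a checker such as pyre or mypy can reject bad call sites statically; this hypothetical misuse, for instance, becomes a type error instead of a silent runtime hazard:

    model = SoftmaxDeepLiftModel("10", 20, 5)   # rejected: str is not int
    out = model.forward([1.0, 2.0])             # rejected: list is not torch.Tensor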