@@ -91,7 +91,7 @@ def __init__(self, threshold: float, value: float, inplace: bool = False) -> Non
     def forward(self, input: Tensor) -> Tensor:
        return F.threshold(input, self.threshold, self.value, self.inplace)

-    def extra_repr(self):
+    def extra_repr(self) -> str:
        inplace_str = ", inplace=True" if self.inplace else ""
        return f"threshold={self.threshold}, value={self.value}{inplace_str}"

@@ -127,7 +127,7 @@ class ReLU(Module):
     __constants__ = ["inplace"]
     inplace: bool

-    def __init__(self, inplace: bool = False):
+    def __init__(self, inplace: bool = False) -> None:
         super().__init__()
         self.inplace = inplace

@@ -185,7 +185,7 @@ class RReLU(Module):

     def __init__(
         self, lower: float = 1.0 / 8, upper: float = 1.0 / 3, inplace: bool = False
-    ):
+    ) -> None:
         super().__init__()
         self.lower = lower
         self.upper = upper
@@ -194,7 +194,7 @@ def __init__(
     def forward(self, input: Tensor) -> Tensor:
         return F.rrelu(input, self.lower, self.upper, self.training, self.inplace)

-    def extra_repr(self):
+    def extra_repr(self) -> str:
         inplace_str = ", inplace=True" if self.inplace else ""
         return f"lower={self.lower}, upper={self.upper}{inplace_str}"

@@ -297,7 +297,7 @@ class ReLU6(Hardtanh):
     >>> output = m(input)
     """

-    def __init__(self, inplace: bool = False):
+    def __init__(self, inplace: bool = False) -> None:
         super().__init__(0.0, 6.0, inplace)

     def extra_repr(self) -> str:
@@ -426,7 +426,7 @@ class SiLU(Module):
     __constants__ = ["inplace"]
     inplace: bool

-    def __init__(self, inplace: bool = False):
+    def __init__(self, inplace: bool = False) -> None:
         super().__init__()
         self.inplace = inplace

@@ -465,7 +465,7 @@ class Mish(Module):
     __constants__ = ["inplace"]
     inplace: bool

-    def __init__(self, inplace: bool = False):
+    def __init__(self, inplace: bool = False) -> None:
         super().__init__()
         self.inplace = inplace

@@ -1118,7 +1118,7 @@ def __init__(

         self._reset_parameters()

-    def _reset_parameters(self):
+    def _reset_parameters(self) -> None:
         if self._qkv_same_embed_dim:
             xavier_uniform_(self.in_proj_weight)
         else:
@@ -1517,7 +1517,7 @@ def __init__(
         self.weight = Parameter(torch.empty(num_parameters, **factory_kwargs))
         self.reset_parameters()

-    def reset_parameters(self):
+    def reset_parameters(self) -> None:
         torch.nn.init.constant_(self.weight, self.init)

     def forward(self, input: Tensor) -> Tensor:
@@ -1619,7 +1619,7 @@ def __setstate__(self, state):
     def forward(self, input: Tensor) -> Tensor:
         return F.softmin(input, self.dim, _stacklevel=5)

-    def extra_repr(self):
+    def extra_repr(self) -> str:
         return f"dim={self.dim}"


@@ -1754,5 +1754,5 @@ def __setstate__(self, state):
     def forward(self, input: Tensor) -> Tensor:
         return F.log_softmax(input, self.dim, _stacklevel=5)

-    def extra_repr(self):
+    def extra_repr(self) -> str:
         return f"dim={self.dim}"
0 commit comments