@@ -72,44 +72,44 @@ def train_network(
72
72
return w_co , bias_co , w_cs , bias_cs , error
73
73
74
74
75
def relu(input_: np.ndarray) -> np.ndarray:
    """
    Relu activation function.

    Used for the hidden layer because it is less susceptible to the
    vanishing-gradient problem.

    :param input_: array of pre-activation values; modified in place.
    :return: the same array with every element clamped to be >= 0.
    """
    # Vectorized replacement for the original per-element loop, which
    # hard-coded a 2-D column shape via ``input_[i, 0]``.  Writing through
    # ``out=`` keeps the original's in-place mutation semantics and also
    # generalizes the function to arrays of any shape.
    return np.maximum(input_, 0, out=input_)
83
83
84
84
85
def d_relu(input_: np.ndarray) -> np.ndarray:
    """
    Relu activation derivative.

    :param input_: array of pre-activation values; overwritten in place.
    :return: the same array holding 1 where the value was >= 0 and 0
        elsewhere (the derivative at exactly 0 is taken to be 1, matching
        the original implementation's convention).
    """
    # Compute the mask before mutating so the two writes cannot interfere.
    # This replaces the original per-element loop, which hard-coded a 2-D
    # column shape via ``input_[i, 0]``; boolean-mask assignment works for
    # arrays of any shape.
    non_negative = input_ >= 0
    input_[non_negative] = 1
    input_[~non_negative] = 0
    return input_
95
95
96
96
97
def sigmoid(input_: float) -> float:
    """
    Sigmoid activation function, used for the output layer.

    :param input_: pre-activation value.
    :return: 1 / (1 + e**(-input_)), squashed into the open interval (0, 1).
    """
    negative_exponential = np.exp(-input_)
    return 1 / (1 + negative_exponential)
103
103
104
104
105
def d_sigmoid(input_: float) -> float:
    """
    Sigmoid activation derivative.

    Computed as s * (1 - s) with s = sigmoid(input_), which is
    algebraically equal to the original sigmoid(input_)**2 * exp(-input_)
    but numerically stable: the original form evaluates 0 * inf = nan for
    large negative inputs, whereas this form correctly returns 0.

    :param input_: pre-activation value.
    :return: derivative of the sigmoid evaluated at ``input_``.
    """
    # The sigmoid expression is inlined so this function is self-contained.
    s = 1 / (1 + np.exp(-input_))
    return s * (1 - s)
110
110
111
111
112
- def main ():
112
+ def main () -> None :
113
113
"""
114
114
Import load_breast_cancer dataset
115
115
It is a binary classification problem with 569 samples and 30 attributes
0 commit comments