@@ -19,6 +19,15 @@ def train_network(
1919
2121 Explanation available here (Spanish only):
2121 https://drive.google.com/file/d/1QTEbRVgevfK8QJ30tWcEbaNbBaKnvGWv/view?usp=sharing
22+
23+ >>> import numpy as np
24+ >>> x_train = np.array([[0.1, 0.2], [0.4, 0.6]])
25+ >>> y_train = np.array([[1], [0]])
26+ >>> neurons = 2
27+ >>> epochs = 10
28+ >>> result = train_network(neurons, x_train, y_train, epochs)
29+ >>> all(part is not None for part in result)
30+ True
2231 """
2332 mu = 0.2
2433 lambda_ = 1e-4
def relu(input_: np.ndarray) -> np.ndarray:
    """
    ReLU activation function.

    Used for the hidden layer because it is less susceptible to the
    vanishing-gradient problem.

    :param input_: array of pre-activation values.
    :return: array of the same shape with negative entries replaced by 0.

    >>> relu(np.array([[0, -1, 2, 3, 0], [0, -1, -2, -3, 5]]))
    array([[0, 0, 2, 3, 0],
           [0, 0, 0, 0, 5]])
    """
    # Element-wise max(x, 0); vectorized, preserves shape and dtype.
    return np.maximum(input_, 0)
8394
8495
def d_relu(input_: np.ndarray) -> np.ndarray:
    """
    Derivative of the ReLU activation function.

    Follows the original convention that the (sub)gradient at 0 is 1,
    i.e. entries >= 0 map to 1 and entries < 0 map to 0.

    Unlike the previous loop-based version, this is fully vectorized and
    does NOT mutate the caller's array in place; a new array with the
    same dtype is returned.

    :param input_: array of pre-activation values.
    :return: array of the same shape/dtype containing 0s and 1s.

    >>> d_relu(np.array([[0, -1, 2, 3, 0], [0, -1, -2, -3, 5]]))
    array([[1, 0, 1, 1, 1],
           [1, 0, 0, 0, 1]])
    """
    # Boolean mask cast back to the input dtype (matches the in-place
    # assignment of 0/1 the old implementation performed).
    return (input_ >= 0).astype(input_.dtype)
95110
96111
def sigmoid(input_: float) -> float:
    """
    Sigmoid activation function, used in the output layer.

    Maps any real value into the open interval (0, 1).

    >>> sigmoid(4)
    0.9820137900379085
    """
    # 1 / (1 + e^(-x)), computed via an intermediate for readability.
    exp_term = np.exp(-input_)
    return 1 / (1 + exp_term)
103121
104122
def d_sigmoid(input_: float) -> float:
    """
    Derivative of the sigmoid activation function.

    Uses the numerically stable identity sigmoid'(x) = s * (1 - s) with
    s = sigmoid(x).  The previous form, sigmoid(x)**2 * exp(-x),
    produces 0 * inf = nan for large negative x because exp(-x)
    overflows; s * (1 - s) correctly underflows to 0 there instead.

    >>> round(d_sigmoid(4), 12)
    0.017662706213
    """
    sig = sigmoid(input_)
    return sig * (1 - sig)
110131
@@ -119,7 +140,7 @@ def main() -> None:
119140
120141 Before training the neural network, the data is normalized to the [0, 1] interval
121142
122- The function trainNetwork () returns the weight and bias matrix to apply the
143+ The function train_network () returns the weight and bias matrix to apply the
123144 transfer function to predict the output
124145 """
125146
@@ -137,7 +158,8 @@ def main() -> None:
137158 y_train = target [0 :train ]
138159 y_test = target [train :]
139160
140- epochs = 50
161+ # play with epochs and neuron numbers
162+ epochs = 5
141163 neurons = 5
142164 w_co , bias_co , w_cs , bias_cs , error = train_network (
143165 neurons , x_train , y_train , epochs
@@ -152,4 +174,7 @@ def main() -> None:
152174
153175
if __name__ == "__main__":
    # Run this module's doctests first, then the demo training run.
    import doctest

    doctest.testmod()
    main()
0 commit comments