Skip to content

Commit 944d863

Browse files
committed
adding backpropagation algorithm with weight-decay regularization
1 parent 9044c0e commit 944d863

File tree

1 file changed

+15
-15
lines changed

1 file changed

+15
-15
lines changed

neural_network/backpropagation_weight_decay.py

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -72,44 +72,44 @@ def train_network(
7272
return w_co, bias_co, w_cs, bias_cs, error
7373

7474

def relu(input_: np.array) -> np.array:
    """
    ReLU activation function, applied in place.

    Used in the hidden layer because it is less susceptible to the
    vanishing-gradient problem than sigmoid.

    Parameters:
        input_: array of pre-activation values (the caller passes an
                (n, 1) column vector; any shape works).

    Returns:
        The same array, with every entry clamped to max(value, 0).

    The original looped in Python over rows of a column vector; a single
    vectorized call is equivalent for (n, 1) inputs and far faster.
    """
    # out=input_ preserves the original in-place mutation semantics.
    np.maximum(input_, 0, out=input_)
    return input_
8383

8484

def d_relu(input_: np.array) -> np.array:
    """
    Derivative of the ReLU activation function, applied in place.

    Parameters:
        input_: array of pre-activation values (the caller passes an
                (n, 1) column vector; any shape works).

    Returns:
        The same array with 1 where the entry was >= 0 and 0 elsewhere,
        matching the original convention d_relu(0) == 1.

    The original looped in Python over rows of a column vector; the
    vectorized form is equivalent for (n, 1) inputs and far faster.
    """
    input_[...] = np.where(input_ >= 0, 1, 0)
    return input_
9595

9696

def sigmoid(input_: float) -> float:
    """
    Sigmoid activation function, used in the output layer.

    Maps any real value into the open interval (0, 1).
    """
    exp_term = np.exp(-input_)
    return 1 / (1 + exp_term)
103103

104104

def d_sigmoid(input_: float) -> float:
    """
    Derivative of the sigmoid activation function.

    Uses s * (1 - s) with s = sigmoid(input_), which is algebraically
    identical to the previous sigmoid(x)**2 * np.exp(-x) but numerically
    stable: for large negative inputs np.exp(-x) overflows to inf and
    the old product became nan, whereas this form correctly returns 0.
    """
    # Inline the sigmoid so the derivative is self-contained.
    s = 1 / (1 + np.exp(-input_))
    return s * (1 - s)
110110

111111

112-
def main():
112+
def main() -> None:
113113
"""
114114
Import load_breast_cancer dataset
115115
It is a binary classification problem with 569 samples and 30 attributes

0 commit comments

Comments
 (0)