-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathGradient_checking.py
More file actions
32 lines (25 loc) · 891 Bytes
/
Gradient_checking.py
File metadata and controls
32 lines (25 loc) · 891 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
import numpy as np
def forward_propagation(x, theta):
    """Compute the cost J = theta . x for the linear model J(theta) = theta * x."""
    return np.dot(theta, x)
def backward_propagation(x, theta):
    """Return dJ/dtheta for J = theta * x, which is simply x (theta is unused)."""
    grad = x
    return grad
def gradient_check(x, theta, epsilon=1e-7):
    """Verify the analytic gradient of J = theta * x against a numerical estimate.

    Uses the centered finite difference (J(theta+eps) - J(theta-eps)) / (2*eps)
    and compares it to the analytic gradient (which is x) via the relative
    difference ||approx - grad|| / (||approx|| + ||grad||).

    Parameters
    ----------
    x : scalar or array-like
        Input to the model; also the analytic gradient dJ/dtheta.
    theta : scalar or array-like
        Parameter at which the gradient is checked.
    epsilon : float, optional
        Perturbation size for the finite difference (default 1e-7).

    Returns
    -------
    float
        Relative difference; values below 1e-7 indicate a correct gradient.
    """
    theta_plus = theta + epsilon
    theta_minus = theta - epsilon
    J_plus = np.dot(theta_plus, x)
    J_minus = np.dot(theta_minus, x)
    gradapprox = (J_plus - J_minus) / (2 * epsilon)
    grad = x  # analytic gradient of J = theta * x with respect to theta
    numerator = np.linalg.norm(gradapprox - grad)
    denominator = np.linalg.norm(gradapprox) + np.linalg.norm(grad)
    # Guard against 0/0 (e.g. x == 0 makes both gradients zero): a zero
    # denominator implies both gradients are zero, so they agree exactly.
    difference = numerator / denominator if denominator != 0 else 0.0
    if difference < 1e-7:
        print ("The gradient is correct!")
    else:
        print ("The gradient is wrong!")
    return difference
if __name__ == "__main__":
    # Demo: check the gradient at a single point (x=2, theta=4).
    # Guarded so importing this module does not trigger the check.
    x, theta = 2, 4
    difference = gradient_check(x, theta)
    print("difference = " + str(difference))