forked from IITGuwahati-AI/Week-2-April-5-Submissions
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: gradient_descent.py
More file actions
29 lines (29 loc) · 895 Bytes
/
gradient_descent.py
File metadata and controls
29 lines (29 loc) · 895 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
import numpy as np
def compute_cost_function(m, t0, t1, x, y):
    """Return the half mean-squared-error cost J(t0, t1) = (1/2m) * sum_i (t0 + t1*x_i - y_i)^2.

    Each term wraps x[i] in a one-element array, so the result is a NumPy
    array (shape (1,) for 1-D x), not a bare scalar.
    """
    total = 0
    for i in range(m):
        residual = t0 + t1 * np.asarray([x[i]]) - y[i]
        total = total + residual ** 2
    return 1 / 2 / m * total
def gradient_descent(alpha, x, y, max_iter):
    """Fit a linear model y ~ t0 + t1*x by batch gradient descent.

    Parameters
    ----------
    alpha : float
        Learning rate.
    x : np.ndarray
        Training inputs; m = x.shape[0] samples, indexed as x[i].
    y : array-like
        Training targets, indexed as y[i].
    max_iter : int
        Exact number of gradient steps to take (there is no
        early-convergence test; the loop always runs max_iter times).

    Returns
    -------
    tuple
        (t0, t1) — intercept and slope after max_iter updates. Because each
        term wraps x[i] in np.asarray([...]), these come back as small
        NumPy arrays rather than bare scalars.
    """
    converged = False
    n_iter = 0  # renamed: 'iter' shadowed the builtin
    m = x.shape[0]
    t0 = 0
    t1 = 0
    # NOTE: the original computed compute_cost_function(...) each iteration
    # into J/e but never used the value for anything (no convergence test),
    # so that dead work is removed; the returned parameters are unchanged.
    while not converged:
        # BUG FIX: original wrote sum[(...)] — subscripting the builtin sum,
        # a TypeError on the first iteration. Also made grad0 consistent
        # with grad1's np.asarray([x[i]]) wrapping.
        grad0 = 1 / m * sum([(t0 + t1 * np.asarray([x[i]]) - y[i]) for i in range(m)])
        grad1 = 1 / m * sum([(t0 + t1 * np.asarray([x[i]]) - y[i]) * np.asarray([x[i]]) for i in range(m)])
        # Simultaneous update: both gradients are evaluated before either
        # parameter changes (the temp0/temp1 pattern of the original).
        temp0 = t0 - alpha * grad0
        temp1 = t1 - alpha * grad1
        t0 = temp0
        t1 = temp1
        n_iter += 1
        if n_iter == max_iter:
            print('maximum iterations occurred')  # typo 'occured' fixed
            converged = True
    return (t0, t1)
# Driver: download the Stanford CS229 problem-set data and run descent.
# Load the feature matrix first, then reuse the same loader for the labels.
raw = np.genfromtxt('http://cs229.stanford.edu/ps/ps1/logistic_x.txt', delimiter=' ')
x = raw[:, :]
raw = np.genfromtxt('http://cs229.stanford.edu/ps/ps1/logistic_y.txt', delimiter=' ')
y = raw[:, :]
gradient_descent(0.1, x, y, 1500)