-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathgradient_maker.py
More file actions
138 lines (114 loc) · 3.71 KB
/
gradient_maker.py
File metadata and controls
138 lines (114 loc) · 3.71 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
import math
class Values:
    """A scalar node in an autograd computation graph (micrograd-style).

    Each instance stores a float payload in ``data`` and accumulates
    d(output)/d(self) in ``grad`` when ``backward()`` is called on a
    downstream node. Operator overloads build the graph; each produced
    node carries a ``_backward`` closure implementing its chain-rule step.
    """
    def __init__(self, data, _children=(), _op = "", label = "" ):
        self.data = data                # scalar payload
        self._prev = set(_children)     # parent nodes that produced this value
        self._op = _op                  # op name, for debugging / graph drawing
        self.grad = 0.0                 # filled in by backward(); accumulates
        self._backward = lambda: None   # chain-rule step; no-op for leaf nodes
        self.label = label              # optional human-readable name
    def __repr__(self):
        return f"the value data is: {self.data}"
    def __add__(self, other):
        # Wrap plain numbers so `Values + 2` works.
        other = other if isinstance(other, Values) else Values(other)
        out = Values(self.data + other.data, (self, other),"+")
        def _backward():
            # d(a+b)/da = d(a+b)/db = 1, so the gradient just flows through.
            self.grad += 1.0 * out.grad
            other.grad += 1.0 * out.grad
        out._backward = _backward
        return out
    def __mul__(self, other):
        other = other if isinstance(other, Values) else Values(other)
        out = Values(self.data * other.data, (self, other),"*")
        def _backward():
            # d(a*b)/da = b and d(a*b)/db = a.
            self.grad += out.grad * other.data
            other.grad += out.grad * self.data
        out._backward = _backward
        return out
    def tanh(self):
        """Hyperbolic tangent activation."""
        x = self.data
        t = (math.exp(2*x) -1) / (math.exp(2*x)+1)
        out = Values(t, (self,), "tanh")
        def _backward():
            # d(tanh x)/dx = 1 - tanh(x)^2; out.data already holds tanh(x).
            self.grad += out.grad * (1 - out.data ** 2)
        out._backward = _backward
        return out
    def exp(self):
        """Natural exponential e**x."""
        x = self.data
        out = Values(math.exp(x), (self,), "exp")
        def _backward():
            # d(e^x)/dx = e^x, which is exactly out.data — reuse it.
            self.grad += out.grad * out.data
        out._backward = _backward
        return out
    def __pow__(self,other):
        """Raise to a constant int/float power (Values exponents unsupported)."""
        assert isinstance(other, (int, float)), "only int/float powers are supported"
        out = Values(self.data**other, (self,), "pow")
        def _backward ():
            # Power rule: d(x^n)/dx = n * x^(n-1).
            self.grad += (other * self.data **(other-1) )* out.grad
        out._backward = _backward
        return out
    def relu (self):
        """Leaky ReLU with negative slope 0.05 (despite the plain-ReLU name)."""
        x = self.data
        t = x if x>0 else 0.05*x
        out = Values(t, (self,), "relu")
        def _backward ():
            # BUG FIX: the forward pass is leaky (slope 0.05 for x <= 0),
            # so the backward slope must be 0.05 there too — it was 0,
            # which silently killed gradients on the negative branch.
            self.grad += out.grad * (1 if x>0 else 0.05)
        out._backward = _backward
        return out
    def sigmoid(self):
        """Logistic sigmoid 1 / (1 + e**-x)."""
        x = self.data
        t = 1/(1+math.exp(-x))
        out = Values(t, (self,),"sigmoid")
        def _backward():
            # d(sigma)/dx = sigma * (1 - sigma).
            self.grad += out.grad * (t*(1-t))
        out._backward = _backward
        return out
    def swish(self):
        """Swish / SiLU activation: x * sigmoid(x)."""
        x = self.data
        sigmoid = 1/(1+math.exp(-x))
        t = x * sigmoid
        out = Values(t, (self,),'swish')
        def _backward():
            # d(x*sigma)/dx = sigma + x*sigma*(1-sigma) = sigma + t*(1-sigma).
            self.grad += out.grad * (sigmoid + t*(1-sigmoid))
        out._backward = _backward
        return out
    # Reflected / derived operators, all expressed via __add__, __mul__, __pow__
    # so they inherit correct gradient bookkeeping automatically.
    def __radd__(self, other):
        return self + other
    def __sub__(self, other):
        return self + (-other)
    def __rsub__(self,other):
        return other + (-self)
    def __neg__(self):
        return self * -1
    def __truediv__(self, other):
        return (self * other **-1)
    def __rtruediv__(self, other):
        # NEW (backward-compatible): makes `number / Values(...)` work,
        # mirroring the existing __truediv__ via the power rule.
        return other * self ** -1
    def __rmul__(self,other):
        return self * other
    def backward(self):
        """Run reverse-mode autodiff starting from this node.

        Topologically sorts every node reachable through ``_prev``, seeds
        this node's grad with 1, then fires each node's ``_backward`` in
        reverse topological order. Gradients ACCUMULATE across calls —
        callers reusing a graph must zero ``grad`` fields themselves.
        """
        topo_order = []
        visited = set()
        def topo(v):
            # Depth-first post-order: parents are appended before this node.
            if v not in visited :
                visited.add(v)
                for node in v._prev:
                    topo(node)
                topo_order.append(v)
        topo(self)
        self.grad = 1
        for nodes in reversed(topo_order):
            nodes._backward()
# inputs x1,x2
# x1 = Values(2.0, label='x1')
# x2 = Values(0.0, label='x2')
# # weights w1,w2
# w1 = Values(-3.0, label='w1')
# w2 = Values(1.0, label='w2')
# # bias of the neuron
# b = Values(6.8813735870195432, label='b')
# # x1*w1 + x2*w2 + b
# x1w1 = x1*w1; x1w1.label = 'x1*w1'
# x2w2 = x2*w2; x2w2.label = 'x2*w2'
# x1w1x2w2 = x1w1 + x2w2; x1w1x2w2.label = 'x1*w1 + x2*w2'
# n = x1w1x2w2 + b; n.label = 'n'
# o = n.tanh(); o.label = 'o'
# o.backward()